So I have a simple program saving video to a file using gstreamermm, the C++ bindings for GStreamer. You can see it below. It works, but the problem is that when I need to shut down the pipeline, telling the mainloop to quit doesn’t seem to work like it does with other pipelines.
To test, I added a --test flag which is just “fakesrc num-buffers=50000000 ! fakesink”. When I run that pipeline, mainloop->quit() stops it immediately. When I try the same with Nvidia’s components, it doesn’t (the callback just prints “Got SIGINT” when Ctrl+C is pressed) and I have to kill the process, resulting in an unplayable file.
Does anybody more familiar with GStreamer than I am know what the recommended way would be to cleanly shut down a pipeline like the one I have below? (nvarguscamerasrc ! video/x-raw(memory:NVMM), width=(int)1920, height=(int)1080, format=(string)NV12, framerate=(fraction)30/1 ! nvv4l2h265enc bitrate=4000000 ! h265parse ! matroskamux ! filesink location=…)
Suggestions in any language would be most welcome. A way to get the camera Element and turn it off, maybe?
#include <csignal>
#include <cstring>    // strncmp (argument parsing)
#include <iostream>
#include <stdexcept>  // std::runtime_error
#include <string>
#include <thread>

#include <glibmm.h>
#include <gstreamermm.h>
#include <gstreamermm/message.h>
// Main event loop for the program. Global so the bus-message callback and the
// SIGINT handler can stop it; created in main() before the loop is run.
Glib::RefPtr<Glib::MainLoop> mainloop;
/**
 * Bus watch callback: reacts to messages posted on the pipeline's Gst::Bus.
 *
 * Quits the main loop on EOS (at which point main() can safely move the
 * pipeline to NULL — the muxer has finalized the file by then) and on
 * errors; logs every other message type for debugging.
 *
 * @param bus     the bus the message arrived on (unused, required signature).
 * @param message the message to dispatch on.
 * @return true to keep the watch installed.
 */
bool on_bus_message(const Glib::RefPtr<Gst::Bus> &bus, const Glib::RefPtr<Gst::Message> &message) {
    switch (message->get_message_type()) {
        case Gst::MESSAGE_EOS:
            // Fix: the original print had no newline/flush, so this final
            // status line could be lost when the process exits right after.
            std::cout << "End of stream reached." << std::endl;
            mainloop->quit();
            break;
        case Gst::MESSAGE_ERROR: {
            // Downcast to the error message subtype to parse the details.
            auto errmsg = Glib::RefPtr<Gst::MessageError>::cast_static(message);
            std::cerr << "Error received from element " << errmsg->get_source()->get_name() << ": "
                      << errmsg->parse_error().what() << std::endl;
            std::cerr << "Debugging information: " << errmsg->parse_debug() << std::endl;
            mainloop->quit();
            break;
        }
        default:
            // Trace every other message so shutdown behavior can be observed.
            std::cout << "BUS_DEBUG: " << message->get_source()->get_name() << ":"
                      << gst_message_type_get_name(static_cast<GstMessageType>(message->get_message_type()))
                      << std::endl;
            break;
    }
    return true;
}
// SIGINT (Ctrl+C) handler installed via std::signal() in main().
// NOTE(review): neither std::cout nor Glib::MainLoop::quit() is
// async-signal-safe, so calling them directly from a signal handler is
// technically undefined behavior. A safer pattern is to set only a
// volatile std::sig_atomic_t flag here and react from the main context
// (e.g. a GLib timeout), which can then send an EOS event through the
// pipeline so the muxer finalizes the output file before teardown —
// simply quitting the loop gives matroskamux no chance to write its index.
void on_SIGINT(int signum) {
std::cout << " Got SIGINT (" << signum << ")" << std::endl;
mainloop->quit();
}
/**
 * Build the recording pipeline, equivalent to:
 *   nvarguscamerasrc ! video/x-raw(memory:NVMM), width=(int)1920, height=(int)1080,
 *     format=(string)NV12, framerate=(fraction)30/1 ! nvv4l2h265enc bitrate=4000000
 *   ! h265parse ! matroskamux ! filesink location=<outfile>
 *
 * @param outfile location the filesink writes the Matroska file to.
 * @return the fully assembled pipeline (still in the NULL state).
 * @throws std::runtime_error if any element cannot be created, added or linked.
 */
Glib::RefPtr<Gst::Pipeline> create_pipeline(const Glib::ustring& outfile) {
    Glib::RefPtr<Gst::Pipeline> pipeline = Gst::Pipeline::create();
    // Fix: Gst::ElementFactory::create_element() returns an EMPTY RefPtr when
    // the plugin is not installed (e.g. running off-Jetson); the original code
    // dereferenced the result unchecked, which crashes. Fail loudly instead.
    auto make_element = [](const char* factory, const char* name) {
        Glib::RefPtr<Gst::Element> element = Gst::ElementFactory::create_element(factory, name);
        if (!element)
            throw std::runtime_error(std::string("Failed to create element '") + factory + "'");
        return element;
    };

    // source: CSI camera via NVIDIA's Argus stack
    Glib::RefPtr<Gst::Element> camera = make_element("nvarguscamerasrc", "camera");
    // camera->set_property("timeout", 30); // when enabled, mainloop->quit() stops working
    camera->set_property("tnr-mode", 2);      // high quality temporal noise reduction TODO: test cost
    camera->set_property("ee-mode", 0);       // edge enhancement off
    camera->set_property("aeantibanding", 0); // ae antibanding off
    camera->set_property("maxperf", true);

    // capsfilter pinning the camera output format; a "element ! caps ! element"
    // shorthand in gst-launch syntax is exactly a capsfilter in between:
    // https://gstreamer.freedesktop.org/data/doc/gstreamer/head/gstreamer-plugins/html/gstreamer-plugins-capsfilter.html
    Glib::RefPtr<Gst::Caps> camera_caps = Gst::Caps::create_from_string((Glib::ustring)
        "video/x-raw(memory:NVMM), width=(int)1920, height=(int)1080, format=(string)NV12, framerate=(fraction)30/1");
    Glib::RefPtr<Gst::CapsFilter> camera_caps_filter = Gst::CapsFilter::create("camera_caps_filter");
    camera_caps_filter->set_property("caps", camera_caps);

    // h265 encoding (hardware encoder)
    Glib::RefPtr<Gst::Element> encoder = make_element("nvv4l2h265enc", "encoder");
    encoder->set_property("bitrate", 4000000);

    // packaging the stream into a Matroska container
    Glib::RefPtr<Gst::Element> parser = make_element("h265parse", "parser");
    Glib::RefPtr<Gst::Element> muxer = make_element("matroskamux", "muxer");
    muxer->set_property("writing-app", (Glib::ustring) "birbcam");
    muxer->set_property("min-index-interval", 6e+10); // write an index every minute in case of crash/power loss
                                                      // so as not to have to run a video file fixer
                                                      // TODO: experiment with values

    // sink
    Glib::RefPtr<Gst::FileSink> sink = Gst::FileSink::create("sink");
    sink->set_property("location", outfile);

    // add all elements to the pipeline
    try {
        pipeline->add(camera)->add(camera_caps_filter)->add(encoder)->add(parser)->add(muxer)->add(sink);
    } catch (std::runtime_error &err) {
        std::cerr << "Error adding Element:" << err.what() << std::endl;
        throw err;
    }
    // connect all elements in the pipeline
    try {
        camera->link(camera_caps_filter)->link(encoder)->link(parser)->link(muxer)->link(sink);
    } catch (std::runtime_error &err) {
        std::cerr << "Error linking Element:" << err.what() << std::endl;
        throw err;
    }
    return pipeline;
}
// "--test-manual" pipeline: fakesrc ! fakesink. Used to verify that the
// main loop / bus plumbing shuts down correctly without any NVIDIA elements.
Glib::RefPtr<Gst::Pipeline> create_fake_pipeline() {
    auto test_pipeline = Gst::Pipeline::create();
    auto source = Gst::FakeSrc::create();
    auto sink = Gst::FakeSink::create();
    test_pipeline->add(source)->add(sink);
    source->link(sink);
    return test_pipeline;
}
// "--test-camera-to-fakesink" pipeline: the real camera plus its caps, but
// the stream is discarded. Isolates shutdown behavior of nvarguscamerasrc
// from the encoder/muxer/filesink chain.
// @return the assembled pipeline.
// @throws std::runtime_error if the nvarguscamerasrc plugin is unavailable.
Glib::RefPtr<Gst::Pipeline> create_camera_to_fakesink_pipeline() {
    Glib::RefPtr<Gst::Pipeline> pipeline = Gst::Pipeline::create();
    Glib::RefPtr<Gst::Element> camera = Gst::ElementFactory::create_element("nvarguscamerasrc", "camera");
    // Fix: create_element() returns an empty RefPtr when the plugin is
    // missing; dereferencing it below would crash.
    if (!camera)
        throw std::runtime_error("Failed to create element 'nvarguscamerasrc'");
    // camera->set_property("timeout", 30);
    // camera->set_property("tnr-mode", 2);
    // camera->set_property("ee-mode", 0);
    // camera->set_property("maxperf", true);
    Glib::RefPtr<Gst::Caps> camera_caps = Gst::Caps::create_from_string((Glib::ustring)
        "video/x-raw(memory:NVMM), width=(int)1920, height=(int)1080, format=(string)NV12, framerate=(fraction)30/1");
    Glib::RefPtr<Gst::CapsFilter> camera_caps_filter = Gst::CapsFilter::create("camera_caps_filter");
    camera_caps_filter->set_property("caps", camera_caps);
    Glib::RefPtr<Gst::FakeSink> fakesink = Gst::FakeSink::create();
    pipeline->add(camera)->add(camera_caps_filter)->add(fakesink);
    camera->link(camera_caps_filter)->link(fakesink);
    return pipeline;
}
int main(int argc, char **argv) {
Glib::RefPtr<Gst::Element> pipeline; // element because Gst::Parse::launch returns Element
if (argc != 2) {
std::cerr << "Length of the arguments needs to be one (the filename)";
return 1;
}
// initialize gstreamer
Gst::init(argc, argv);
// parse arguments and configure pipeline
if (!strncmp(argv[1], "--test-manual", 16)) {
pipeline = create_fake_pipeline();
} else if (!strncmp(argv[1], "--test-camera-to-fakesink", 32)) {
pipeline = create_camera_to_fakesink_pipeline();
} else {
pipeline = create_pipeline((Glib::ustring)argv[1]);
}
// get the Gst::Bus from the pipeline and configure the on_bus_message callback
Glib::RefPtr<Gst::Bus> bus = pipeline->get_bus();
bus->add_watch(sigc::ptr_fun(&on_bus_message));
// set the pipeline to the playing state
pipeline->set_state(Gst::STATE_PLAYING);
// create the main event loop
mainloop = Glib::MainLoop::create();
if (!mainloop) {
std::cerr << "Failed to create main event loop!" << std::endl;
return 1;
}
// connect shutdown signal callback...
std::signal(SIGINT, on_SIGINT);
// run until stopped (blocks here);
mainloop->run();
// shut down cleanly (this will shift the pipeline through all states to NULL)
// https://gstreamer.freedesktop.org/documentation/plugin-development/basics/states.html?gi-language=c
pipeline->set_state(Gst::STATE_NULL);
return 0;
}