• Hardware Platform (Jetson / GPU) : NVIDIA Jetson AGX Orin
• DeepStream Version : 6.3
• JetPack Version (valid for Jetson only) : 5.1.2
• TensorRT Version : 8.5.2
• Issue Type( questions, new requirements, bugs) : questions
I would like to recreate this Gstreamer pipeline in Python using DeepStream SDK:
gst-launch-1.0 filesrc location='my_video.h264' ! "video/x-h264, width=1920, height=1080, framerate=60/1" ! h264parse ! nvv4l2decoder | nvstreammux width=1920 height=1080 batch-size=1 batched-push-timeout=4000000 ! nvvidconv ! tee name=t ! nvv4l2h265enc iframeinterval=60 ! h265parse ! splitmuxsink location=h265_data/video_R%02d.h265 max-size-time=1000000000 t. ! videorate ! videoconvert ! 'video/x-raw(memory:NVMM), format=(string)I420, framerate=1/1' ! nvv4l2h264enc iframeinterval=1 ! h264parse ! splitmuxsink location=h264_data/video_h264_P%02d.h264 max-size-time=1000000000
I recreated it in this way:
# Initialize GStreamer
Gst.init(None)
# Create GStreamer pipeline
pipeline = Gst.Pipeline()
if not pipeline:
sys.stderr.write("Unable to create Pipeline\n")
# Source element for reading from file
input_source = create_pipeline_element('filesrc', 'source', 'Source')
input_source.set_property('location', args[1])
# Create caps filter to set the width, height and framerate of the input
input_caps = create_pipeline_element("capsfilter", "input-caps", "Input Caps")
input_caps.set_property("caps", Gst.Caps.from_string("video/x-h264, width=1920, height=1080, framerate=60/1"))
# Create h264 parser
input_parser = create_pipeline_element("h264parse", "h264-parser", "H264 Parser")
# Create nvv4l2decoder
input_decoder = create_pipeline_element("nvv4l2decoder", "nvv4l2-decoder", "Nvv4l2 Decoder")
# Create streammux
streammux = create_pipeline_element("nvstreammux", "streammux", "Streammux")
streammux.set_property("width", 1920)
streammux.set_property("height", 1080)
streammux.set_property("batch-size", 1)
streammux.set_property("batched-push-timeout", 4000000)
# Create tee
tee = create_pipeline_element("tee", "tee", "Main Tee")
# ------------------------------------------------
# Create branch for h265 elements
# Create h265 encoder
h265_encoder = self.__create_element_or_print_err('nvv4l2h265enc', 'h265_encoder', 'h265 Encoder')
h265_encoder.set_property('iframeinterval', 60)
h265_elements.append(h265_encoder)
# Create h265 parser
h265_parser = self.__create_element_or_print_err('h265parse', 'h265_parser', 'h265 Parser')
h265_elements.append(h265_parser)
# Create h265 split muxer sink
h265_muxer = self.__create_element_or_print_err('splitmuxsink', 'h265_muxer', 'h265 Muxer')
h265_muxer.set_property('location', 'h265_data/video_R%02d.h265')
h265_muxer.set_property('max-size-time', 1000000000)
h265_elements.append(h265_muxer)
# ------------------------------------------------
# Create branch for h264 elements
# Create h264 videorate
h264_videorate = self.__create_element_or_print_err('videorate', 'h264_videorate', 'h264 Videorate')
h264_videorate.set_property('rate', 1)
h264_videorate.set_property('max-rate', 1)
h264_elements.append(h264_videorate)
# Create h264 videoconvert
h264_nvvideoconvert = self.__create_element_or_print_err('nvvideoconvert', 'h264_nvvideoconvert', 'h264 Videoconvert')
h264_elements.append(h264_nvvideoconvert)
# Create h264 encoder
h264_encoder = self.__create_element_or_print_err('nvv4l2h264enc', 'h264_encoder', 'h264 Encoder')
h264_encoder.set_property('iframeinterval', 1)
h264_elements.append(h264_encoder)
# Create h264 parser
h264_parser = self.__create_element_or_print_err('h264parse', 'h264_parser', 'h264 Parser')
h264_elements.append(h264_parser)
# Create h264 split muxer sink
h264_muxer = self.__create_element_or_print_err('splitmuxsink', 'h264_muxer', 'h264 Muxer')
h264_muxer.set_property('location', 'h264_data/video_h264_P%02d.h264')
h264_muxer.set_property('max-size-time', 1000000000)
h264_elements.append(h264_muxer)
# ------------------------------------------------
# Add elements to pipeline
pipeline.add(input_source)
pipeline.add(input_caps)
pipeline.add(input_parser)
pipeline.add(input_decoder)
pipeline.add(streammux)
pipeline.add(tee)
# Add h265 elements to pipeline
tee.add(h265_encoder)
tee.add(h265_parser)
tee.add(h265_muxer)
# Add h264 elements to pipeline
tee.add(h264_videorate)
tee.add(h264_nvvideoconvert)
tee.add(h264_encoder)
tee.add(h264_parser)
tee.add(h264_muxer)
# ------------------------------------------------
# Link elements in pipeline
input_source.link(input_caps)
input_caps.link(input_parser)
input_parser.link(input_decoder)
input_decoder_srcpad = input_decoder.get_static_pad("src")
if not input_decoder_srcpad:
sys.stderr.write("Unable to get src pad of decoder\n")
streammux_sinkpad = streammux.get_request_pad("sink_0")
if not streammux_sinkpad:
sys.stderr.write("Unable to get sink pad of streammux\n")
input_decoder_srcpad.link(streammux_sinkpad)
streammux.link(tee)
# Link h265 elements
tee.link(h265_encoder)
h265_encoder.link(h265_parser)
h265_parser.link(h265_muxer)
# Link h264 elements
tee.link(h264_videorate)
h264_videorate.link(h264_nvvideoconvert)
h264_nvvideoconvert.link(h264_encoder)
h264_encoder.link(h264_parser)
h264_parser.link(h264_muxer)
# ----------------------------
# Create an event loop and feed gstreamer bus messages to it
print(f"Playing file {args[1]}\n")
loop = GLib.MainLoop()
bus = pipeline.get_bus()
bus.add_signal_watch()
bus.connect("message", bus_call, loop)
# Start pipeline
print("Starting pipeline\n")
pipeline.set_state(Gst.State.PLAYING)
try:
loop.run()
except:
pass
# cleanup
pipeline.set_state(Gst.State.NULL)
PROBLEM
The problem that occurs here is in h264 elements branch because of the h264_videorate
which does not generate frames correctly. The code after run generates only one h265 and h264 frame and it does not generate them further, it stops. When I comment out h264_videorate it works fine, frames are generated every second, but the generated h264 elements do not have frame rate of 1.
What is wrong in the h264 pipeline? H265 pipeline works perfectly fine, it generetaes the frames that it should generate but in h264 pipeline it seems to be an error somewhere but I cannot figure out where.
There are also missing caps in h264 pipeline according to this … ! ‘video/x-raw(memory:NVMM), format=(string)I420, framerate=1/1’ ! … but I wanted to set the framerate in videorate which does not work. Is it correct or should I create caps to set format and framerate instead of setting it in videorate?