RTSP video feed and recording video with GStreamer and NVIDIA AGX Orin

I am trying to set up a GStreamer pipeline via the GStreamer Python bindings. The pipeline should use the camera to record footage to files (rolling them over), and it should also let clients connect to the device over RTSP to view the feed live. The pipeline writes to the files correctly, but an RTSP client only displays 1 to 2 seconds of video before the stream freezes. I've tried a variety of changes to the script, but I can't get it to play beyond a few frames. Here is my code:

import gi
import os
import signal
import sys

gi.require_version('Gst', '1.0')
gi.require_version('GstRtspServer', '1.0')
from gi.repository import Gst, GstRtspServer, GLib

Gst.init(None)

def signal_handler(sig, frame):
    print("Caught signal, cleaning up and stopping the main loop.")
    mainloop.quit()

# Bridge callback: pull each encoded sample from the recording pipeline's
# appsink and push it into the RTSP pipeline's appsrc.
def on_new_sample(appsink, appsrc):
    sample = appsink.emit("pull-sample")
    if sample:
        appsrc.emit("push-sample", sample)
    return Gst.FlowReturn.OK

BITRATE = 2000000  # target encoder bitrate in bits per second

# Capture from the camera, hardware-encode to H.264, then tee the encoded
# stream to a rolling file recording (splitmuxsink) and to an appsink that
# feeds the RTSP factory below.
main_pipeline_str = f"""
v4l2src device=/dev/video7 do-timestamp=true ! video/x-raw, format=YUY2, width=1920, height=1080, framerate=30/1 ! nvvidconv ! video/x-raw(memory:NVMM) ! nvv4l2h264enc bitrate={BITRATE} preset-level=4 insert-sps-pps=true ! tee name=t
t. ! h264parse ! splitmuxsink location=/external/video_%05d.mkv max-size-time=100000000000 max-files=10
t. ! h264parse ! appsink name=appsink emit-signals=true sync=false
"""

main_pipeline = Gst.parse_launch(main_pipeline_str)
main_pipeline.set_state(Gst.State.PLAYING)
appsink = main_pipeline.get_by_name("appsink")

def error_cb(bus, msg):
    err, debug_info = msg.parse_error()
    print(f"Error: {err.message}")

bus = main_pipeline.get_bus()
bus.add_signal_watch()
bus.connect("message::error", error_cb)

class TestRtspMediaFactory(GstRtspServer.RTSPMediaFactory):
    def __init__(self, appsink):
        GstRtspServer.RTSPMediaFactory.__init__(self)
        self.appsink = appsink

    def do_create_element(self, url):
        print("New client has entered!")
        # Per-client pipeline: take H.264 buffers from the appsrc and payload
        # them as RTP for the RTSP session.
        pipeline_str = "appsrc name=appsrc is-live=true do-timestamp=true ! h264parse ! rtph264pay config-interval=1 pt=96 name=pay0"
        pipeline = Gst.parse_launch(pipeline_str)
        appsrc = pipeline.get_by_name("appsrc")
        self.appsink.connect("new-sample", on_new_sample, appsrc)
        return pipeline

rtsp_server = GstRtspServer.RTSPServer()
rtsp_server.set_service("8557")
factory = TestRtspMediaFactory(appsink)
factory.set_shared(True)
mount_points = rtsp_server.get_mount_points()
mount_points.add_factory("/camera", factory)
rtsp_server.attach(None)

signal.signal(signal.SIGINT, signal_handler)
signal.signal(signal.SIGTERM, signal_handler)

mainloop = GLib.MainLoop()
mainloop.run()

Sorry for the late response. Is this still an issue you need support with? Thanks

Hi,
For Python programming, we would need other users to share their experience.

For C, please try this sample:
Using RTSP encode (with H264) and stream images at device memory on Xavier - #6 by Appletree

See if you can build and run this reference sample successfully first, then try to develop your use case based on it.
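
For reference, the pattern that sample follows can also be sketched with the Python bindings: instead of bridging through appsink/appsrc, give the RTSP media factory a launch string that runs capture and H.264 encoding itself, so the server owns the pipeline, caps and timestamps. This is only a rough, untested sketch; the device path, caps and bitrate are carried over from the pipeline in the question and are not verified:

import gi
gi.require_version('Gst', '1.0')
gi.require_version('GstRtspServer', '1.0')
from gi.repository import Gst, GstRtspServer, GLib

Gst.init(None)

# Media factory whose launch string builds the whole capture -> encode -> pay
# chain, so no appsink/appsrc bridge is needed.
factory = GstRtspServer.RTSPMediaFactory()
factory.set_launch(
    "( v4l2src device=/dev/video7 ! "
    "video/x-raw, format=YUY2, width=1920, height=1080, framerate=30/1 ! "
    "nvvidconv ! video/x-raw(memory:NVMM) ! "
    "nvv4l2h264enc bitrate=2000000 insert-sps-pps=true ! "
    "h264parse ! rtph264pay config-interval=1 pt=96 name=pay0 )"
)
factory.set_shared(True)  # all clients share a single pipeline instance

server = GstRtspServer.RTSPServer()
server.set_service("8557")
server.get_mount_points().add_factory("/camera", factory)
server.attach(None)

print("Stream available at rtsp://<device-ip>:8557/camera")
GLib.MainLoop().run()

Note this only covers the live-streaming half: the factory's pipeline is created when a client connects, so the recording branch from the question would either need to be added to this launch string or kept in a separate, always-running pipeline.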
