I am trying to write a Python script on my Jetson Orin that will start an RTSP server and stream the feed from a MIPI camera when a command is received from an Android phone. I can run the following pipeline with test-launch and open the stream in my Android app:
nvarguscamerasrc sensor-id=0 ! video/x-raw(memory:NVMM), width=692, height=520 ! videorate ! video/x-raw(memory:NVMM), framerate=30/1 ! nvv4l2h264enc ! rtph264pay name=pay0 pt=96 config-interval=1
I created my own RTSP server in Python with the following code; if I launch this script as a standalone process, I can open the stream in Android:
import sys
import gi
gi.require_version('GstRtspServer', '1.0')
gi.require_version('Gst', '1.0')
from gi.repository import GLib, Gst, GstRtspServer

def startBroadcast(launchString, locationString):
    Gst.init(None)
    mainloop = GLib.MainLoop()
    server = GstRtspServer.RTSPServer()
    mounts = server.get_mount_points()
    factory = GstRtspServer.RTSPMediaFactory()
    factory.set_launch(launchString)
    mounts.add_factory(f"/{locationString}", factory)
    server.attach(None)
    print(f"stream ready at rtsp://127.0.0.1:8554/{locationString}")
    mainloop.run()

if __name__ == "__main__":
    # Optionally take a launch string from the command line (not used below yet)
    for i, arg in enumerate(sys.argv):
        if i == 1:
            serverLaunchstring = arg
    launchString = 'nvarguscamerasrc sensor-id=0 ' \
        '! video/x-raw(memory:NVMM),width=692,height=520 ' \
        '! videorate ! video/x-raw(memory:NVMM),framerate=30/1 ' \
        '! nvv4l2h264enc ' \
        '! rtph264pay name=pay0 pt=96 config-interval=1'
    locationString = "test"
    startBroadcast(launchString, locationString)
That’s all great, but if I run test-launch as a subprocess, or if I run the above script using multiprocessing.Process, I get the following errors and cannot open the stream in Android or locally on the Jetson:
(python3:14143): GStreamer-WARNING **: 08:48:25.071: Failed to load plugin '/usr/lib/aarch64-linux-gnu/gstreamer-1.0/libgstnvvidconv.so': /lib/aarch64-linux-gnu/libGLdispatch.so.0: cannot allocate memory in static TLS block
(python3:14143): GStreamer-WARNING **: 08:48:25.077: Failed to load plugin '/usr/lib/aarch64-linux-gnu/gstreamer-1.0/libgstnvvideo4linux2.so': /lib/aarch64-linux-gnu/libGLdispatch.so.0: cannot allocate memory in static TLS block
I have seen these errors before and was able to resolve them using “export LD_PRELOAD=”, but it’s not working in this case.
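For reference, this is roughly how I would expect that workaround to be applied to the test-launch case from Python; a minimal sketch only, where the library path is copied from the warnings above and the test-launch path is the one from my script below, so please treat it as an assumption rather than a verified fix:

import os
import subprocess

# Path to the test-launch binary, copied from the commented-out call in my script below.
TEST_LAUNCH = "/home/camgian/Documents/WA/gst-rtsp-server-1.14.1/examples/test-launch"
# Library named in the warnings above; preloading it is the workaround I used before.
PRELOAD_LIB = "/lib/aarch64-linux-gnu/libGLdispatch.so.0"

launchString = 'nvarguscamerasrc sensor-id=0 ' \
    '! video/x-raw(memory:NVMM),width=692,height=520 ' \
    '! videorate ! video/x-raw(memory:NVMM),framerate=30/1 ' \
    '! nvv4l2h264enc ' \
    '! rtph264pay name=pay0 pt=96 config-interval=1'

# Set LD_PRELOAD only for the child; subprocess execs a fresh binary, so the dynamic
# loader should see it before any GStreamer plugin is dlopen'd.
child_env = dict(os.environ, LD_PRELOAD=PRELOAD_LIB)
proc = subprocess.Popen([TEST_LAUNCH, launchString], env=child_env)

The idea is just to make sure the child process itself starts with LD_PRELOAD in its environment, but I may be misunderstanding where the preload needs to happen.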
Here is the Python script I am running:
import time
import zmq
import subprocess
from base64 import b64encode
import rtspserver
from datetime import datetime
import multiprocessing

class Test_Server:
    streamThread = None
    selectedCamera = ""

    def doStuff(self):
        # Pull the phone's 192.168.244.x address out of the neighbour table
        subprocess.check_output("ip neigh > test.txt", shell=True)
        ipLine = subprocess.check_output('''grep -risn "192.168.244" ./test.txt''', shell=True)
        x = str(ipLine).split()
        ipAddr = x[0]
        ipAddr = ipAddr[4:]  # strip the "b'1:" prefix left by str() on the grep output

        context = zmq.Context()
        socket = context.socket(zmq.REP)
        socket.connect("tcp://" + ipAddr + ":2519")

        while True:
            message = socket.recv_string()
            print("Received: " + message)
            time.sleep(1)
            if message == "Play":
                response = "starting camera"
                launchString = 'nvarguscamerasrc sensor-id=0 ' \
                    '! video/x-raw(memory:NVMM),width=692,height=520 ' \
                    '! videorate ! video/x-raw(memory:NVMM),framerate=30/1 ' \
                    '! nvv4l2h264enc ' \
                    '! rtph264pay name=pay0 pt=96 config-interval=1'
                locationString = "test"
                # subprocess.call(["/home/camgian/Documents/WA/gst-rtsp-server-1.14.1/examples/test-launch", launchString])
                self.startStreamThread(launchString, locationString)  # USED THIS FOR NON-MIPI CAMS
            elif message == "Stop":
                response = "stopping camera"
                if self.streamThread:
                    self.streamThread.terminate()  # USED THIS FOR NON-MIPI CAMS
                    self.streamThread = None       # USED THIS FOR NON-MIPI CAMS
            else:
                response = "bad message"
            socket.send_string(response)

    def startStreamThread(self, launchString, locationString):
        self.streamThread = multiprocessing.Process(
            target=rtspserver.startBroadcast, args=(launchString, locationString), daemon=True)
        self.streamThread.start()

if __name__ == "__main__":
    testServer = Test_Server()
    testServer.doStuff()
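In case it helps narrow things down, the only variant of startStreamThread I can think of is forcing the 'spawn' start method and setting LD_PRELOAD in os.environ before the child starts, so the freshly exec'd python3 sees it at load time; again just a sketch based on my assumptions (library path taken from the warnings above), not something I have confirmed works:

import os
import multiprocessing

import rtspserver  # the standalone server module shown above

# Library named in the warnings; assuming that preloading it is still the right workaround.
os.environ["LD_PRELOAD"] = "/lib/aarch64-linux-gnu/libGLdispatch.so.0"

launchString = 'nvarguscamerasrc sensor-id=0 ' \
    '! video/x-raw(memory:NVMM),width=692,height=520 ' \
    '! videorate ! video/x-raw(memory:NVMM),framerate=30/1 ' \
    '! nvv4l2h264enc ' \
    '! rtph264pay name=pay0 pt=96 config-interval=1'

def startStreamProcess(launchString, locationString):
    # 'spawn' execs a fresh python3 that picks up the environment (including LD_PRELOAD)
    # at startup, unlike the default 'fork' on Linux, where the dynamic loader has
    # already run in the parent before the GStreamer plugins get loaded.
    proc = multiprocessing.Process(
        target=rtspserver.startBroadcast,
        args=(launchString, locationString),
        daemon=True)
    proc.start()
    return proc

if __name__ == "__main__":
    multiprocessing.set_start_method("spawn")
    streamProc = startStreamProcess(launchString, "test")
    streamProc.join()  # the RTSP main loop never returns, so this keeps the server up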
Is this some threading issue or something else I don’t understand?
Any help would be appreciated.