How to use appsrc to input realsense color frame into pipeline in python

Please provide complete information as applicable to your setup.

• **Hardware Platform:** Jetson Orin NX
• **DeepStream Version:** 7.0
• **JetPack Version:** 6.0

This is my key code:
# set appsrc elements
caps_string = “video/x-raw, format=BGR8, width=640, height=480, framerate=30/1”
source.set_property(“is-live”, True)
source.set_property(“format”, Gst.Format.TIME)
source.set_property(“caps”, Gst.caps_from_string(caps_string))
source.connect(“need-data”, on_need_data)

def on_need_data(source, user_data):
frames = pipeline_rs.wait_for_frames()
color_frame = frames.get_color_frame()
# color_image = np.asanyarray(color_frame.get_data())
color_image = np.asanyarray(color_frame.get_data())
# color_image = np.array(color_image, dtype=np.uint8)
# color_image = (color_image - 128) * 128

if color_frame:
    buf = Gst.Buffer.new_wrapped_bytes(GLib.Bytes.new(color_image.tobytes()))
    # buf.fill(color_image.tobytes(), int(width*height))
    print("******")
    flag = source.emit("push-buffer", buf)
    time.sleep(0.3)
    # source.gst_app_src_push_buffer(buf)
    print("123456")
    if flag != Gst.FlowReturn.OK:
        print("error")

When I run the code, it reports: "Error: gst-stream-error-quark: Internal data stream error. (1): …/libs/gst/base/gstbasesrc.c(3127): gst_base_src_loop (): /GstPipeline:pipeline0/GstAppSrc:realsense-source:
streaming stopped, reason not-negotiated (-4)".

I want to know what’s wrong and how to deal with this.

Hi
What does the appsrc pipeline look like? To what did you link it?

Regards,
Allan Navarro

Embedded SW Engineer at RidgeRun

Contact us: support@ridgerun.com
Developers wiki: https://developer.ridgerun.com/
Website: www.ridgerun.com

Hi Allan Navarro!
Here is the whole code:
import pyrealsense2 as rs
import cv2
import numpy as np
import time

import sys
sys.path.append(‘…/’)
from apps.common.platform_info import PlatformInfo
from apps.common.bus_call import bus_call

import pyds
import gi
gi.require_version(‘Gst’,‘1.0’)
from gi.repository import Gst,GLib

print(“moudles are ready!”)

def on_need_data(source, user_data):
frames = pipeline_rs.wait_for_frames()
color_frame = frames.get_color_frame()
# color_image = np.asanyarray(color_frame.get_data())
color_image = np.asanyarray(color_frame.get_data())
# color_image = np.array(color_image, dtype=np.uint8)
# color_image = (color_image - 128) * 128

if color_frame:
    buf = Gst.Buffer.new_wrapped_bytes(GLib.Bytes.new(color_image.tobytes()))
    # buf.fill(color_image.tobytes(), int(width*height))
    print("******")
    flag = source.emit("push-buffer", buf)
    time.sleep(0.3)
    # source.gst_app_src_push_buffer(buf)
    print("123456")
    if flag != Gst.FlowReturn.OK:
        print("error")
    return flag

def main():
# Standard GStreamer initialization
platform_info = PlatformInfo()
Gst.init(None)

# Create gstreamer elements
print("Creating Pipeline \n ")
pipeline_gst = Gst.Pipeline()
if not pipeline_gst:
    sys.stderr.write(" Unable to create Pipeline \n")

print("Creating Source \n ")
source = Gst.ElementFactory.make("appsrc", "realsense-source")
if not source:
    sys.stderr.write(" Unable to create Source \n")

print("Creating Video Converter \n")
vidconv = Gst.ElementFactory.make("videoconvert", "convertor_src1")
if not vidconv:
    sys.stderr.write(" Unable to create videoconvert \n")

print("Creating Video Sink \n")
if platform_info.is_integrated_gpu():
    print("Creating xvimagesink \n")
    videosink = Gst.ElementFactory.make("xvimagesink", "sink")
    if not videosink:
        sys.stderr.write(" Unable to create xvimagesink \n")
else:
    if platform_info.is_platform_aarch64():
        print("Creating xvimagesink2 \n")
        videosink = Gst.ElementFactory.make("xvimagesink", "sink")
    else:
        print("Creating error \n")


# set appsrc elements
caps_string = "video/x-raw, format=BGR8, width=640, height=480, framerate=30/1"
source.set_property("is-live", True)
source.set_property("format", Gst.Format.TIME)
source.set_property("caps", Gst.caps_from_string(caps_string))
source.connect("need-data", on_need_data)


# add elements to pipeline_gst
print("Adding elements to Pipeline \n")
pipeline_gst.add(source)
pipeline_gst.add(vidconv)
# pipeline_gst.add(vidconv2)
pipeline_gst.add(videosink)

# link element in the pipline_gst
source.link(vidconv)
vidconv.link(videosink)

# create an event loop and feed gstreamer bus mesages to it
loop = GLib.MainLoop()
bus = pipeline_gst.get_bus()
bus.add_signal_watch()
bus.connect ("message", bus_call, loop)

# start play back and listen to events
print("Starting pipeline \n")
pipeline_gst.set_state(Gst.State.PLAYING)
try:
    loop.run()
except:
    pass
pipeline_gst.set_state(Gst.State.NULL)

if name == ‘main’:
# create realsense pipeline
pipeline_rs = rs.pipeline()
config = rs.config()

pipeline_wrapper = rs.pipeline_wrapper(pipeline_rs)
pipeline_profile = config.resolve(pipeline_wrapper)
device = pipeline_profile.get_device()
device_product_line = str(device.get_info(rs.camera_info.product_line))

found_rgb = False
for s in device.sensors:
    if s.get_info(rs.camera_info.name) == 'RGB Camera':
        found_rgb = True
        break
if not found_rgb:
    print("******")
    exit(0)

# config.enable_stream(rs.stream.depth, 640, 480, rs.format.z16, 30)
config.enable_stream(rs.stream.color, 640, 480, rs.format.bgr8, 30)
# config.enable_stream(rs.stream.depth, 1280, 720, rs.format.z16, 30)
# config.enable_stream(rs.stream.color, 1280, 720, rs.format.bgr8, 30)
pipeline_rs.start(config)

sys.exit(main())

pipeline_rs.stop()

I link appsrc to videoconvert.

I think "BGR8" is not a valid format name in GStreamer; could you try "BGR"?

Regards,
Allan Navarro

Embedded SW Engineer at RidgeRun

Contact us: support@ridgerun.com
Developers wiki: https://developer.ridgerun.com/
Website: www.ridgerun.com

Thank you!
The issue has been resolved.

1 Like

This topic was automatically closed 14 days after the last reply. New replies are no longer allowed.