Deepstream 6.4 green screen with RTSP

Please provide complete information as applicable to your setup.

**• Hardware Platform (Jetson / GPU)** Jetson Orin NX Engineering Reference Developer Kit
**• DeepStream Version** 6.4
**• JetPack Version (valid for Jetson only)** 6.0 DP
**• TensorRT Version** 8.6.2.3
**• NVIDIA GPU Driver Version (valid for GPU only)**
**• Issue Type (questions, new requirements, bugs)** bugs
**• How to reproduce the issue?** (This is for bugs. Including which sample app is used, the configuration file content, the command line used and other details for reproducing)
**• Requirement details** (This is for new requirements. Including the module name, i.e. for which plugin or for which sample application, the function description)

When I run deepstream-test5 and try to watch the RTSP output with the VLC player, the screen is green.

My config file:

[application]
enable-perf-measurement=1
perf-measurement-interval-sec=5
#gie-kitti-output-dir=streamscl

[tiled-display]
enable=1
rows=1
columns=1
width=1280
height=720
gpu-id=0
#(0): nvbuf-mem-default - Default memory allocated, specific to particular platform
#(1): nvbuf-mem-cuda-pinned - Allocate Pinned/Host cuda memory, applicable for Tesla
#(2): nvbuf-mem-cuda-device - Allocate Device cuda memory, applicable for Tesla
#(3): nvbuf-mem-cuda-unified - Allocate Unified cuda memory, applicable for Tesla
#(4): nvbuf-mem-surface-array - Allocate Surface Array memory, applicable for Jetson
nvbuf-memory-type=0


[source0]
enable=1
#Type - 1=CameraV4L2 2=URI 3=MultiURI
type=3
uri=file://../../../../../samples/streams/sample_720p_10min.mp4
#uri=rtsp://10.45.30.55:8554/stream
num-sources=1
gpu-id=0
nvbuf-memory-type=0

[source1]
enable=0
#Type - 1=CameraV4L2 2=URI 3=MultiURI
type=3
uri=file://../../../../../samples/streams/sample_1080p_h264.mp4
num-sources=2
gpu-id=0
nvbuf-memory-type=0

[sink0]
enable=1
#Type - 1=FakeSink 2=EglSink 3=File
type=2
sync=1
source-id=0
gpu-id=0
nvbuf-memory-type=0

[sink1]
enable=1
#Type - 1=FakeSink 2=EglSink 3=File 4=UDPSink 5=nvdrmvideosink 6=MsgConvBroker
type=6
msg-conv-config=dstest5_msgconv_sample_config.txt
#(0): PAYLOAD_DEEPSTREAM - Deepstream schema payload
#(1): PAYLOAD_DEEPSTREAM_MINIMAL - Deepstream schema payload minimal
#(256): PAYLOAD_RESERVED - Reserved type
#(257): PAYLOAD_CUSTOM   - Custom schema payload
msg-conv-payload-type=1
msg-broker-proto-lib=/opt/nvidia/deepstream/deepstream/lib/libnvds_kafka_proto.so
#Provide your msg-broker-conn-str here
msg-broker-conn-str=10.45.30.64;9092;test
topic=test
#Optional:
#msg-broker-config=../../deepstream-test4/cfg_kafka.txt

[sink2]
enable=1
#Type - 1=FakeSink 2=EglSink 3=File 4=RTSPStreaming
type=4
#1=h264 2=h265
codec=1
#encoder type 0=Hardware 1=Software
enc-type=1
#sw-preset=1 #for SW enc=(0)None (1)ultrafast (2)superfast (3)veryfast (4)faster
#(5)fast (6)medium (7)slow (8)slower (9)veryslow (10)placebo
sync=0
bitrate=4000000
#H264 Profile - 0=Baseline 2=Main 4=High
#H265 Profile - 0=Main 1=Main10
# set profile only for hw encoder, sw encoder selects profile based on sw-preset
#profile=0
# set below properties in case of RTSPStreaming
rtsp-port=8554
udp-port=5400


# sink type = 6 by default creates msg converter + broker.
# To use multiple brokers use this group for converter and use
# sink type = 6 with disable-msgconv = 1
[message-converter]
enable=0
msg-conv-config=dstest5_msgconv_sample_config.txt
#(0): PAYLOAD_DEEPSTREAM - Deepstream schema payload
#(1): PAYLOAD_DEEPSTREAM_MINIMAL - Deepstream schema payload minimal
#(256): PAYLOAD_RESERVED - Reserved type
#(257): PAYLOAD_CUSTOM   - Custom schema payload
msg-conv-payload-type=0
# Name of library having custom implementation.
#msg-conv-msg2p-lib=<val>
# Id of component in case only selected message to parse.
#msg-conv-comp-id=<val>

# Configure this group to enable cloud message consumer.
[message-consumer0]
enable=0
proto-lib=/opt/nvidia/deepstream/deepstream/lib/libnvds_kafka_proto.so
conn-str=<host>;<port>
config-file=<broker config file e.g. cfg_kafka.txt>
subscribe-topic-list=<topic1>;<topic2>;<topicN>
# Use this option if message has sensor name as id instead of index (0,1,2 etc.).
#sensor-list-file=dstest5_msgconv_sample_config.txt

[osd]
enable=1
gpu-id=0
border-width=1
text-size=15
text-color=1;1;1;1;
text-bg-color=0.3;0.3;0.3;1
font=Arial
show-clock=0
clock-x-offset=800
clock-y-offset=820
clock-text-size=12
clock-color=1;0;0;0
nvbuf-memory-type=0

[streammux]
gpu-id=0
##Boolean property to inform muxer that sources are live
live-source=0
batch-size=4
##time out in usec, to wait after the first buffer is available
##to push the batch even if the complete batch is not formed
batched-push-timeout=40000
## Set muxer output width and height
width=1080
height=720
##Enable to maintain aspect ratio wrt source, and allow black borders, works
##along with width, height properties
enable-padding=0
nvbuf-memory-type=0
## If set to TRUE, system timestamp will be attached as ntp timestamp
## If set to FALSE, ntp timestamp from rtspsrc, if available, will be attached
# attach-sys-ts-as-ntp=1

[primary-gie]
enable=1
gpu-id=0
batch-size=4
## 0=FP32, 1=INT8, 2=FP16 mode
bbox-border-color0=1;0;0;1
bbox-border-color1=0;1;1;1
bbox-border-color2=0;1;1;1
bbox-border-color3=0;1;0;1
nvbuf-memory-type=0
interval=0
gie-unique-id=1
model-engine-file=../../../../../samples/models/Primary_Detector/resnet18_trafficcamnet.etlt_b4_gpu0_int8.engine
labelfile-path=../../../../../samples/models/Primary_Detector/labels.txt
config-file=../../../../../samples/configs/deepstream-app/config_yee.txt
#infer-raw-output-dir=../../../../../samples/primary_detector_raw_output/

[tracker]
enable=0
# For NvDCF and NvDeepSORT tracker, tracker-width and tracker-height must be a multiple of 32, respectively
tracker-width=960
tracker-height=544
ll-lib-file=/opt/nvidia/deepstream/deepstream/lib/libnvds_nvmultiobjecttracker.so
# ll-config-file required to set different tracker types
# ll-config-file=../../../../../samples/configs/deepstream-app/config_tracker_IOU.yml
# ll-config-file=../../../../../samples/configs/deepstream-app/config_tracker_NvSORT.yml
ll-config-file=../../../../../samples/configs/deepstream-app/config_tracker_NvDCF_perf.yml
# ll-config-file=../../../../../samples/configs/deepstream-app/config_tracker_NvDCF_accuracy.yml
# ll-config-file=../../../../../samples/configs/deepstream-app/config_tracker_NvDeepSORT.yml
gpu-id=0
display-tracking-id=1

[tests]
file-loop=0

Terminal output:

nvidia@tegra-ubuntu:~/deepstream-6.4/sources/apps/sample_apps/deepstream-agx$ deepstream-test5-app -c configs/test5_yee_kafka.txt -p 1 -t

*** DeepStream: Launched RTSP Streaming at rtsp://localhost:8554/ds-test ***

WARNING: [TRT]: Using an engine plan file across different models of devices is not recommended and is likely to affect performance or even cause errors.
0:00:07.289777870 7600 0xaaaad5e5d2d0 INFO nvinfer gstnvinfer.cpp:682:gst_nvinfer_logger:<primary_gie> NvDsInferContext[UID 1]: Info from NvDsInferContextImpl::deserializeEngineAndBackend() <nvdsinfer_context_impl.cpp:2092> [UID = 1]: deserialized trt engine from :/home/nvidia/deepstream-6.4/sources/apps/sample_apps/deepstream-agx/configs/…/…/…/…/…/samples/models/Primary_Detector/resnet18_trafficcamnet.etlt_b4_gpu0_int8.engine
INFO: [Implicit Engine Info]: layers num: 3
0 INPUT kFLOAT input_1 3x544x960
1 OUTPUT kFLOAT output_bbox/BiasAdd 16x34x60
2 OUTPUT kFLOAT output_cov/Sigmoid 4x34x60

0:00:07.689920354 7600 0xaaaad5e5d2d0 INFO nvinfer gstnvinfer.cpp:682:gst_nvinfer_logger:<primary_gie> NvDsInferContext[UID 1]: Info from NvDsInferContextImpl::generateBackendContext() <nvdsinfer_context_impl.cpp:2195> [UID = 1]: Use deserialized engine model: /home/nvidia/deepstream-6.4/sources/apps/sample_apps/deepstream-agx/configs/…/…/…/…/…/samples/models/Primary_Detector/resnet18_trafficcamnet.etlt_b4_gpu0_int8.engine
0:00:07.713445658 7600 0xaaaad5e5d2d0 INFO nvinfer gstnvinfer_impl.cpp:328:notifyLoadModelStatus:<primary_gie> [UID 1]: Load new model:/home/nvidia/deepstream-6.4/sources/apps/sample_apps/deepstream-agx/configs/…/…/…/…/…/samples/configs/deepstream-app/config_yee.txt sucessfully

Runtime commands:
h: Print this help
q: Quit

p: Pause
r: Resume

NOTE: To expand a source in the 2D tiled display and view object details, left-click on the source.
To go back to the tiled display, right-click anywhere on the window.

Active sources : 0

**PERF: FPS 0 (Avg)
Tue Apr 9 09:18:09 2024
**PERF: 0.00 (0.00)
** INFO: <bus_callback:301>: Pipeline ready

Opening in BLOCKING MODE
NvMMLiteOpen : Block : BlockType = 261
NvMMLiteBlockCreate : Block : BlockType = 261
** INFO: <bus_callback:287>: Pipeline running

mimetype is video/x-raw
WARNING; playback mode used with URI [file:///home/nvidia/deepstream-6.4/sources/apps/sample_apps/deepstream-agx/configs/…/…/…/…/…/samples/streams/sample_720p_10min.mp4] not conforming to timestamp format; check README; using system-time
Active sources : 1
Tue Apr 9 09:18:14 2024
**PERF: 132.58 (131.90)
Active sources : 1
Tue Apr 9 09:18:19 2024
**PERF: 119.87 (125.56)
Active sources : 1
Tue Apr 9 09:18:24 2024
**PERF: 120.12 (123.62)
Active sources : 1
Tue Apr 9 09:18:29 2024
**PERF: 119.85 (122.68)
Active sources : 1
Tue Apr 9 09:18:34 2024
**PERF: 119.99 (122.13)
Active sources : 1
Tue Apr 9 09:18:39 2024
**PERF: 120.12 (121.76)
Active sources : 1
Tue Apr 9 09:18:44 2024
**PERF: 120.03 (121.50)

(deepstream-test5-app:7600): GLib-GObject-WARNING **: 09:18:48.004: g_object_get_is_valid_property: object class ‘GstUDPSrc’ has no property named ‘pt’
Active sources : 1
Tue Apr 9 09:18:49 2024
**PERF: 119.86 (121.31)
Active sources : 1
Tue Apr 9 09:18:54 2024
**PERF: 119.97 (121.16)
Active sources : 1
Tue Apr 9 09:18:59 2024
**PERF: 120.08 (121.04)
Active sources : 1
Tue Apr 9 09:19:04 2024
**PERF: 119.94 (120.95)
Active sources : 1
Tue Apr 9 09:19:09 2024
**PERF: 120.11 (120.86)
Active sources : 1
Tue Apr 9 09:19:14 2024
**PERF: 120.05 (120.80)
Active sources : 1
Tue Apr 9 09:19:19 2024
**PERF: 120.00 (120.74)
Active sources : 1
Tue Apr 9 09:19:24 2024
**PERF: 120.01 (120.69)
Active sources : 1
Tue Apr 9 09:19:29 2024
**PERF: 119.94 (120.64)
q
Quitting
nvstreammux: Successfully handled EOS for source_id=0
App run successful
nvidia@tegra-ubuntu:~/deepstream-6.4/sources/apps/sample_apps/deepstream-agx$

There is a known software encoding bug; please refer to this topic. Please use hardware encoding, or replace the nvvideoconvert plugin with the nvvidconv plugin in deepstream-app.

INFO: <create_udpsink_bin:640>: Could not create HW encoder. Falling back to SW encoder

How do I replace the nvvideoconvert plugin with the nvvidconv plugin in deepstream-app?

Is the device an Orin NX or an Orin Nano? The NX supports hardware encoding; the Nano does not.
If it is an Orin NX, could you share the results of “gst-inspect-1.0 nvv4l2h264enc” and “ldd /usr/lib/aarch64-linux-gnu/gstreamer-1.0/libgstnvvideo4linux2.so”?

Orin NX

nvidia@tegra-ubuntu:~/deepstream-6.4/sources/apps/sample_apps/deepstream-test5$ gst-inspect-1.0 nvv4l2h264enc
No such element or plugin ‘nvv4l2h264enc’

nvidia@tegra-ubuntu:~/deepstream-6.4/sources/apps/sample_apps/deepstream-test5$ ldd /usr/lib/aarch64-linux-gnu/gstreamer-1.0/libgstnvvideo4linux2.so
linux-vdso.so.1 (0x0000ffff85e40000)
libglib-2.0.so.0 => /lib/aarch64-linux-gnu/libglib-2.0.so.0 (0x0000ffff85c30000)
libgobject-2.0.so.0 => /lib/aarch64-linux-gnu/libgobject-2.0.so.0 (0x0000ffff85bb0000)
libgstallocators-1.0.so.0 => /lib/aarch64-linux-gnu/libgstallocators-1.0.so.0 (0x0000ffff85b90000)
libgstreamer-1.0.so.0 => /lib/aarch64-linux-gnu/libgstreamer-1.0.so.0 (0x0000ffff85a20000)
libgstvideo-1.0.so.0 => /lib/aarch64-linux-gnu/libgstvideo-1.0.so.0 (0x0000ffff85950000)
libgstbase-1.0.so.0 => /lib/aarch64-linux-gnu/libgstbase-1.0.so.0 (0x0000ffff858c0000)
libv4l2.so.0 => /lib/aarch64-linux-gnu/libv4l2.so.0 (0x0000ffff857a0000)
libnvbufsurface.so.1.0.0 => /usr/lib/aarch64-linux-gnu/nvidia/libnvbufsurface.so.1.0.0 (0x0000ffff856d0000)
libnvbufsurftransform.so.1.0.0 => /usr/lib/aarch64-linux-gnu/nvidia/libnvbufsurftransform.so.1.0.0 (0x0000ffff84120000)
libgstnvcustomhelper.so => /usr/lib/aarch64-linux-gnu/nvidia/libgstnvcustomhelper.so (0x0000ffff84100000)
libgstnvdsseimeta.so.1.0.0 => /usr/lib/aarch64-linux-gnu/nvidia/libgstnvdsseimeta.so.1.0.0 (0x0000ffff840e0000)
libc.so.6 => /lib/aarch64-linux-gnu/libc.so.6 (0x0000ffff83f30000)
/lib/ld-linux-aarch64.so.1 (0x0000ffff85e07000)
libpcre.so.3 => /lib/aarch64-linux-gnu/libpcre.so.3 (0x0000ffff83eb0000)
libm.so.6 => /lib/aarch64-linux-gnu/libm.so.6 (0x0000ffff83e10000)
libffi.so.8 => /lib/aarch64-linux-gnu/libffi.so.8 (0x0000ffff83df0000)
libgmodule-2.0.so.0 => /lib/aarch64-linux-gnu/libgmodule-2.0.so.0 (0x0000ffff83dd0000)
libunwind.so.8 => /lib/aarch64-linux-gnu/libunwind.so.8 (0x0000ffff83d90000)
libdw.so.1 => /lib/aarch64-linux-gnu/libdw.so.1 (0x0000ffff83cd0000)
liborc-0.4.so.0 => /lib/aarch64-linux-gnu/liborc-0.4.so.0 (0x0000ffff83c30000)
libv4lconvert.so.0 => /lib/aarch64-linux-gnu/libv4lconvert.so.0 (0x0000ffff83ba0000)
libnvrm_mem.so => /usr/lib/aarch64-linux-gnu/nvidia/libnvrm_mem.so (0x0000ffff83b80000)
libnvrm_surface.so => /usr/lib/aarch64-linux-gnu/nvidia/libnvrm_surface.so (0x0000ffff83b40000)
libnvrm_chip.so => /usr/lib/aarch64-linux-gnu/nvidia/libnvrm_chip.so (0x0000ffff83b20000)
libEGL.so.1 => /lib/aarch64-linux-gnu/libEGL.so.1 (0x0000ffff83af0000)
libnvos.so => /usr/lib/aarch64-linux-gnu/nvidia/libnvos.so (0x0000ffff83ac0000)
libnvbuf_fdmap.so.1.0.0 => /usr/lib/aarch64-linux-gnu/nvidia/libnvbuf_fdmap.so.1.0.0 (0x0000ffff83aa0000)
libnvrm_host1x.so => /usr/lib/aarch64-linux-gnu/nvidia/libnvrm_host1x.so (0x0000ffff83a70000)
libnvvic.so => /usr/lib/aarch64-linux-gnu/nvidia/libnvvic.so (0x0000ffff83a40000)
libcuda.so.1 => /usr/lib/aarch64-linux-gnu/nvidia/libcuda.so.1 (0x0000ffff81d80000)
libstdc++.so.6 => /lib/aarch64-linux-gnu/libstdc++.so.6 (0x0000ffff81b50000)
liblzma.so.5 => /lib/aarch64-linux-gnu/liblzma.so.5 (0x0000ffff81b10000)
libelf.so.1 => /lib/aarch64-linux-gnu/libelf.so.1 (0x0000ffff81ae0000)
libz.so.1 => /lib/aarch64-linux-gnu/libz.so.1 (0x0000ffff81ab0000)
libbz2.so.1.0 => /lib/aarch64-linux-gnu/libbz2.so.1.0 (0x0000ffff81a80000)
libnvsciipc.so => /usr/lib/aarch64-linux-gnu/nvidia/libnvsciipc.so (0x0000ffff81a40000)
libnvsocsys.so => /usr/lib/aarch64-linux-gnu/nvidia/libnvsocsys.so (0x0000ffff81a20000)
libnvrm_sync.so => /usr/lib/aarch64-linux-gnu/nvidia/libnvrm_sync.so (0x0000ffff81a00000)
libGLdispatch.so.0 => /lib/aarch64-linux-gnu/libGLdispatch.so.0 (0x0000ffff81870000)
libnvrm_stream.so => /usr/lib/aarch64-linux-gnu/nvidia/libnvrm_stream.so (0x0000ffff81850000)
libnvcolorutil.so => /usr/lib/aarch64-linux-gnu/nvidia/libnvcolorutil.so (0x0000ffff81820000)
libdl.so.2 => /lib/aarch64-linux-gnu/libdl.so.2 (0x0000ffff81800000)
librt.so.1 => /lib/aarch64-linux-gnu/librt.so.1 (0x0000ffff817e0000)
libpthread.so.0 => /lib/aarch64-linux-gnu/libpthread.so.0 (0x0000ffff817c0000)
libnvrm_gpu.so => /usr/lib/aarch64-linux-gnu/nvidia/libnvrm_gpu.so (0x0000ffff81740000)
libgcc_s.so.1 => /lib/aarch64-linux-gnu/libgcc_s.so.1 (0x0000ffff81710000)
libnvtegrahv.so => /usr/lib/aarch64-linux-gnu/nvidia/libnvtegrahv.so (0x0000ffff816f0000)

It seems no library is missing. Please refer to the “Note” part of point 2 in this link.

I entered this command: $ rm ${HOME}/.cache/gstreamer-1.0/registry.aarch64.bin

But the RTSP output is still green.

Do you mean that with hardware encoding (enc-type=0) the RTSP output is still green? If yes, we need to narrow this down step by step.

  1. Can you see normal video with the following command line? I am wondering if it is a decoding issue. If sample_720p_10min.mp4 is a custom file, please also test that file.
gst-launch-1.0  filesrc location=/opt/nvidia/deepstream/deepstream/samples/streams/sample_1080p_h264.mp4 ! qtdemux ! h264parse ! nvv4l2decoder ! nv3dsink
  2. If decoding is fine, please check whether the out.mp4 generated by the following command line plays well; I am wondering if hardware encoding is fine. (A supplementary playback check is sketched after this list.)
gst-launch-1.0  filesrc location=/opt/nvidia/deepstream/deepstream/samples/streams/sample_1080p_h264.mp4 ! qtdemux ! h264parse ! nvv4l2decoder  ! nvvideoconvert ! 'video/x-raw(memory:NVMM),format=I420' ! nvv4l2h264enc bitrate=1000000 ! h264parse ! qtmux ! filesink location=./out.mp4
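
If step 2 produces an out.mp4, a quick way to verify it is to play it back or probe it. This is a supplementary check, not part of the reply above, and it assumes gst-play-1.0 (from gst-plugins-base) and ffprobe (from ffmpeg) are installed:

gst-play-1.0 ./out.mp4      # play the encoded file back
ffprobe ./out.mp4           # or just inspect the container and codec without playing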

I can’t use hardware encoding; it will show an error.

I used this command:

gst-launch-1.0  filesrc location=/opt/nvidia/deepstream/deepstream/samples/streams/sample_1080p_h264.mp4 ! qtdemux ! h264parse ! nvv4l2decoder ! nv3dsink

and it displays normally.

But when I use this command

gst-launch-1.0  filesrc location=/opt/nvidia/deepstream/deepstream/samples/streams/sample_1080p_h264.mp4 ! qtdemux ! h264parse ! nvv4l2decoder  ! nvvideoconvert ! 'video/x-raw(memory:NVMM),format=I420' ! nvv4l2h264enc bitrate=1000000 ! h264parse ! qtmux ! filesink location=./out.mp4

it shows: WARNING: erroneous pipeline: no element “nvv4l2h264enc”

The issue is still that the hardware encoding plugin can't be created. Please share a more detailed log (1.log), generated by the following command line. Thanks!

export GST_DEBUG=6  && gst-inspect-1.0 nvv4l2h264enc  >1.log 2>&1  && export GST_DEBUG=1
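
As a supplementary check (not part of the reply above), the verbose log can also be searched for plugin-loading problems; grepping for “blacklist” or for the encoder library is a common GStreamer diagnostic, assuming the log is written to 1.log as above:

grep -in "blacklist" 1.log                             # was any plugin blacklisted during the registry scan?
grep -in "libgstnvvideo4linux2\|nvv4l2h264enc" 1.log   # does the V4L2 plugin register an encoder element?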

Hello, here is 1.log. Please check it.
1.log (6.6 MB)

From the log 1.log, there is only nvv4l2decoder and no nvv4l2h264enc, so the device's GStreamer plugins do not support hardware encoding. Let's focus on software encoding.

Does the following command line output a normal video?

gst-launch-1.0  filesrc location=/opt/nvidia/deepstream/deepstream/samples/streams/sample_720p.mp4 ! qtdemux ! h264parse ! nvv4l2decoder  ! \
 nvvidconv ! 'video/x-raw, format=I420' ! x264enc bitrate=1000000 ! h264parse ! qtmux ! filesink location=./out.mp4
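
Before running the pipeline above, it may help to confirm that the software encoder and the Jetson converter elements are actually registered. This is a supplementary check, not from the reply above:

gst-inspect-1.0 x264enc | head -n 5      # software H.264 encoder (gst-plugins-ugly)
gst-inspect-1.0 nvvidconv | head -n 5    # Jetson multimedia video converter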

If yes, the solution “replace nvvideoconvert with nvvidconv” works. Please make the following modifications to deepstream-app.

  1. Modify #define NVDS_ELEM_VIDEO_CONV “nvvideoconvert” to #define NVDS_ELEM_VIDEO_CONV “nvvidconv” in /opt/nvidia/deepstream/deepstream-6.4/sources/apps/apps-common/includes/deepstream_config.h (a one-line sketch is shown after this list).
  2. Enter /opt/nvidia/deepstream/deepstream/sources/apps/sample_apps/deepstream-test5 and rebuild deepstream-test5 according to the README.
  3. Execute ./deepstream-test5-app -c configs/test5_yee_kafka.txt -p 1 -t
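
A minimal sketch of step 1 as shell commands, assuming the stock DeepStream 6.4 header layout; the sed expression and backup file name are illustrative, not from the reply above:

cd /opt/nvidia/deepstream/deepstream-6.4/sources/apps/apps-common/includes
sudo cp deepstream_config.h deepstream_config.h.bak   # keep a backup of the original header
sudo sed -i 's/#define NVDS_ELEM_VIDEO_CONV "nvvideoconvert"/#define NVDS_ELEM_VIDEO_CONV "nvvidconv"/' deepstream_config.h
# step 2: rebuild the sample app; CUDA_VER must match the installed CUDA (12.2 on JetPack 6.0 DP per the README)
cd /opt/nvidia/deepstream/deepstream/sources/apps/sample_apps/deepstream-test5
sudo CUDA_VER=12.2 make clean && sudo CUDA_VER=12.2 make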

No, it shows WARNING: erroneous pipeline: no element “nvv4l2h264enc”

Sorry, I corrected the last comment. Please refer to my last comment.

Still an error; it shows:

ERROR: from element /GstPipeline:pipeline0/GstFileSink:filesink0: Could not open file "test.264" for writing.
Additional debug info:
../plugins/elements/gstfilesink.c(458): gst_file_sink_open_file (): /GstPipeline:pipeline0/GstFileSink:filesink0: system error: Permission denied
ERROR: pipeline doesn't want to preroll.
ERROR: from element /GstPipeline:pipeline0/GstFileSink:filesink0: GStreamer error: state change failed and some element failed to post a proper error message with the reason for the failure.
Additional debug info:
../libs/gst/base/gstbasesink.c(5878): gst_base_sink_change_state (): /GstPipeline:pipeline0/GstFileSink:filesink0: Failed to start
ERROR: pipeline doesn't want to preroll.
Failed to set pipeline to PAUSED.
Setting pipeline to NULL ...
Freeing pipeline ...

Please change to another directory where you have write permission, then run the command line again. I updated the command line to output an mp4 file because you may not have a tool to view a raw .264 file.
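
For example, re-running the same pipeline from an assumed writable directory such as /tmp (illustrative only; the pipeline itself is the one from the reply above):

cd /tmp
gst-launch-1.0  filesrc location=/opt/nvidia/deepstream/deepstream/samples/streams/sample_720p.mp4 ! qtdemux ! h264parse ! nvv4l2decoder  ! \
 nvvidconv ! 'video/x-raw, format=I420' ! x264enc bitrate=1000000 ! h264parse ! qtmux ! filesink location=./out.mp4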

Now it can output a normal video.

According to your solution, step 1 is to modify #define NVDS_ELEM_VIDEO_CONV “nvvideoconvert” to #define NVDS_ELEM_VIDEO_CONV “nvvidconv”.
Which file do I need to modify? This one? /opt/nvidia/deepstream/deepstream-6.4/sources/apps/apps-common/includes/deepstream_config.h

Yes, please modify nvvideoconvert to nvvidconv.

Still not solved; the RTSP output is still green.
This is my config file:
test5_yee_kafka.txt (6.5 KB)
This is deepstream_config.h:

#ifndef __NVGSTDS_CONFIG_H__
#define __NVGSTDS_CONFIG_H__

#ifdef __aarch64__
#define IS_TEGRA
#endif

#define MEMORY_FEATURES "memory:NVMM"

#ifdef IS_TEGRA
#define NVDS_ELEM_SRC_CAMERA_CSI "nvarguscamerasrc"
#else
#define NVDS_ELEM_SRC_CAMERA_CSI "videotestsrc"
#endif
#define NVDS_ELEM_SRC_CAMERA_V4L2 "v4l2src"
#define NVDS_ELEM_SRC_URI "uridecodebin"
#define NVDS_ELEM_SRC_MULTIFILE "multifilesrc"
#define NVDS_ELEM_SRC_ALSA "alsasrc"

#define NVDS_ELEM_DECODEBIN "decodebin"
#define NVDS_ELEM_WAVPARSE "wavparse"

#define NVDS_ELEM_QUEUE "queue"
#define NVDS_ELEM_CAPS_FILTER "capsfilter"
#define NVDS_ELEM_TEE "tee"
#define NVDS_ELEM_IDENTITY "identity"

#define NVDS_ELEM_PREPROCESS "nvdspreprocess"
#define NVDS_ELEM_SECONDARY_PREPROCESS "nvdspreprocess"
#define NVDS_ELEM_PGIE "nvinfer"
#define NVDS_ELEM_SGIE "nvinfer"
#define NVDS_ELEM_NVINFER "nvinfer"
#define NVDS_ELEM_INFER_SERVER "nvinferserver"
#define NVDS_ELEM_INFER_AUDIO "nvinferaudio"
#define NVDS_ELEM_TRACKER "nvtracker"

#define NVDS_ELEM_VIDEO_CONV "nvvidconv"
#define NVDS_ELEM_AUDIO_CONV "audioconvert"
#define NVDS_ELEM_AUDIO_RESAMPLER "audioresample"
#define NVDS_ELEM_STREAM_MUX "nvstreammux"
#define NVDS_ELEM_STREAM_DEMUX "nvstreamdemux"
#define NVDS_ELEM_TILER "nvmultistreamtiler"
#define NVDS_ELEM_OSD "nvdsosd"
#define NVDS_ELEM_SEGVISUAL "nvsegvisual"
#define NVDS_ELEM_DSANALYTICS_ELEMENT "nvdsanalytics"
#define NVDS_ELEM_DSEXAMPLE_ELEMENT "dsexample"

#define NVDS_ELEM_DEWARPER "nvdewarper"
#define NVDS_ELEM_SPOTANALYSIS "nvspot"
#define NVDS_ELEM_NVAISLE "nvaisle"
#define NVDS_ELEM_BBOXFILTER "nvbboxfilter"
#define NVDS_ELEM_MSG_CONV "nvmsgconv"
#define NVDS_ELEM_MSG_BROKER "nvmsgbroker"

#define NVDS_ELEM_SINK_FAKESINK "fakesink"
#define NVDS_ELEM_SINK_FILE "filesink"
#define NVDS_ELEM_SINK_EGL "nveglglessink"
#define NVDS_ELEM_SINK_3D "nv3dsink"
#define NVDS_ELEM_SINK_DRM "nvdrmvideosink"
#define NVDS_ELEM_EGLTRANSFORM "nvegltransform"

#define NVDS_ELEM_MUX_MP4 "qtmux"
#define NVDS_ELEM_MKV "matroskamux"

#define NVDS_ELEM_ENC_H264_HW "nvv4l2h264enc"
#define NVDS_ELEM_ENC_H265_HW "nvv4l2h265enc"
#define NVDS_ELEM_ENC_MPEG4 "avenc_mpeg4"

#define NVDS_ELEM_ENC_H264_SW "x264enc"
#define NVDS_ELEM_ENC_H265_SW "x265enc"

#define MAX_SOURCE_BINS 1024
#define MAX_SINK_BINS (1024)
#define MAX_SECONDARY_GIE_BINS (16)
#define MAX_SECONDARY_PREPROCESS_BINS (16)
#define MAX_MESSAGE_CONSUMERS (16)

#define NVDS_ELEM_NVMULTIURISRCBIN "nvmultiurisrcbin"

#endif

Please use make clean && make to rebuild, and please set sink type=3 (File) first to check whether the generated file is fine. Please use ./deepstream-test5-app, not deepstream-test5-app.
If it still doesn't work, please dump the media pipeline with this method (sketched below) to check whether nvvidconv is used.
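
A common way to dump the pipeline graph (a sketch of what is likely meant by “this method”; the dump directory name is an arbitrary assumption, and rendering the graph requires graphviz) is GStreamer's DOT export:

export GST_DEBUG_DUMP_DOT_DIR=/tmp/ds-pipeline      # deepstream-app writes .dot graphs here when this is set
mkdir -p /tmp/ds-pipeline
./deepstream-test5-app -c configs/test5_yee_kafka.txt -p 1 -t
grep -l nvvidconv /tmp/ds-pipeline/*.dot            # check whether nvvidconv shows up in the dumped graph
dot -Tpng /tmp/ds-pipeline/*.dot -O                 # render each graph to a .png next to the .dot file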