Using "input-tensor-meta=1" on nvtracker-element on Jetson leads to Exception

Please provide complete information as applicable to your setup.

System 1 (dGPU):

• Hardware Platform (Jetson / GPU) NVIDIA RTX 6000 Ada Generation
• DeepStream Version 7.0.0
• JetPack Version (valid for Jetson only)
• TensorRT Version 8.6.1.6
• NVIDIA GPU Driver Version (valid for GPU only) 535.183.01
• Issue Type( questions, new requirements, bugs) Bug

System 2 (Jetson):

• Hardware Platform (Jetson / GPU) Jetson Orin NX 16GB
• DeepStream Version 7.0.0
• JetPack Version (valid for Jetson only) 6.0
• TensorRT Version 8.6.2.3
• NVIDIA GPU Driver Version (valid for GPU only)
• Issue Type( questions, new requirements, bugs) Bug

Hello,

below you’ll find a detection pipeline featuring a nvdspreprocess-element and a nvtracker-element.
This pipeline runs without errors on System 1 (dGPU) as specified above.

However, running this pipeline on System 2 (Jetson) throws the following exception:

!![Exception] [NvMOTContext::processFrame()] motFrame->bufferList[i]->colorFormat != m_Config.perTransformBatchConfig[i].colorFormat
An exception occurred. [NvMOTContext::processFrame()] motFrame->bufferList[i]->colorFormat != m_Config.perTransformBatchConfig[i].colorFormat
gstnvtracker: Low-level tracker lib returned error 1
0:00:05.788979232 19188 0xaaaaf6c92060 WARN                 nvinfer gstnvinfer.cpp:2420:gst_nvinfer_output_loop:<pgie> error: Internal data stream error.
0:00:05.789019296 19188 0xaaaaf6c92060 WARN                 nvinfer gstnvinfer.cpp:2420:gst_nvinfer_output_loop:<pgie> error: streaming stopped, reason error (-5)

When the “input-tensor-meta” property of the nvtracker-element is set to 0 via

tracker.set_property("input-tensor-meta", 0)

the exception is not thrown.

How could this issue be solved?
Thank you for your time!

Drawing of pipeline:

Implementation of pipeline:

import os
import sys

import gi
gi.require_version("Gst", "1.0")
from gi.repository import GLib, Gst

# taken from deepstream-test3
def cb_newpad(decodebin, decoder_src_pad,data):
    """Target the source bin's ghost "src" pad at a newly added decoder pad.

    Only video pads backed by NVMM (NVIDIA device) memory are accepted;
    anything else is reported to stderr and ignored.
    """
    print("In cb_newpad\n")
    caps=decoder_src_pad.get_current_caps()
    if not caps:
        # Pad not negotiated yet; fall back to the pad's possible caps.
        caps = decoder_src_pad.query_caps()
    structure = caps.get_structure(0)
    media_type = structure.get_name()
    features = caps.get_features(0)
    print("gstname=", media_type)
    if media_type.find("video") == -1:
        return
    print("features=", features)
    if not features.contains("memory:NVMM"):
        sys.stderr.write(" Error: Decodebin did not pick nvidia decoder plugin.\n")
        return
    # "data" is the enclosing source bin; wire its ghost pad to the decoder.
    ghost_pad = data.get_static_pad("src")
    if not ghost_pad.set_target(decoder_src_pad):
        sys.stderr.write("Failed to link decoder src pad to source bin ghost pad\n")


# taken from deepstream-test3
def decodebin_child_added(child_proxy,Object,name,user_data):
    """Recursively watch nested decodebins and configure the source element.

    Called for every child uridecodebin adds. Nested decodebins are hooked
    with the same callback; the "source" element (if it exposes the property)
    gets drop-on-latency enabled for live sources.
    """
    print("Decodebin child added:", name, "\n")
    if(name.find("decodebin") != -1):
        Object.connect("child-added",decodebin_child_added,user_data)
    if "source" in name:
        source_element = child_proxy.get_by_name("source")
        # get_by_name() returns None when no child is literally named
        # "source"; guard before probing its properties.
        if source_element is not None and source_element.find_property("drop-on-latency") != None:
            Object.set_property("drop-on-latency", True)


# taken from deepstream-test3
def create_source_bin(index,uri):
    """Build a source bin wrapping a uridecodebin for the given URI.

    The bin exposes one targetless ghost "src" pad; cb_newpad targets it at
    the decoder's video pad once that pad appears. Returns the bin, or None
    when any element could not be created.
    """
    print("Creating source bin")
    bin_name="source-bin-%02d" %index
    print(bin_name)
    nbin=Gst.Bin.new(bin_name)
    if not nbin:
        sys.stderr.write(" Unable to create source bin \n")
        # Bail out: every statement below would dereference None.
        return None
    uri_decode_bin=Gst.ElementFactory.make("uridecodebin", "uri-decode-bin")
    if not uri_decode_bin:
        sys.stderr.write(" Unable to create uri decode bin \n")
        return None
    uri_decode_bin.set_property("uri",uri)
    # Decoder pads appear dynamically; link them from the callbacks.
    uri_decode_bin.connect("pad-added",cb_newpad,nbin)
    uri_decode_bin.connect("child-added",decodebin_child_added,nbin)
    Gst.Bin.add(nbin,uri_decode_bin)
    bin_pad=nbin.add_pad(Gst.GhostPad.new_no_target("src",Gst.PadDirection.SRC))
    if not bin_pad:
        sys.stderr.write(" Failed to add ghost pad in source bin \n")
        return None
    return nbin


if __name__ == "__main__":
    # nvdspreprocess requires the new nvstreammux; select it before the
    # pipeline is built.
    os.putenv("USE_NEW_NVSTREAMMUX", "yes")

    Gst.init(None)
    pipeline = Gst.Pipeline()

    # ====================================================
    # CREATE ELEMENTS FROM LEFT TO RIGHT
    # ====================================================

    source_bin = create_source_bin(0, "file:///opt/nvidia/deepstream/deepstream/samples/streams/sample_1080p_h264.mp4")

    streammux = Gst.ElementFactory.make("nvstreammux", "streammux")
    streammux.set_property("batch-size", 1)

    queue_streammux_to_preprocess = Gst.ElementFactory.make("queue", "queue_streammux_to_preprocess")

    # nvdspreprocess scales/converts the frames and attaches the input
    # tensor meta consumed downstream.
    preprocess = Gst.ElementFactory.make("nvdspreprocess", "preprocess")
    preprocess.set_property("config-file", "config_preprocess.txt")

    queue_preprocess_to_pgie = Gst.ElementFactory.make("queue", "queue_preprocess_to_pgie")

    pgie = Gst.ElementFactory.make("nvinfer", "pgie")
    pgie.set_property("config-file-path", "/opt/nvidia/deepstream/deepstream/samples/configs/deepstream-app/config_infer_primary.txt")
    if preprocess.get_property('enable'):
        # Use the tensor prepared by nvdspreprocess instead of letting
        # nvinfer do its own preprocessing.
        pgie.set_property("input-tensor-meta", 1)

    queue_pgie_to_tracker = Gst.ElementFactory.make("queue", "queue_pgie_to_tracker")

    tracker = Gst.ElementFactory.make("nvtracker", "tracker")
    tracker.set_property("tracker-width", 960)
    tracker.set_property("tracker-height", 544)
    tracker.set_property("ll-lib-file", "/opt/nvidia/deepstream/deepstream/lib/libnvds_nvmultiobjecttracker.so")
    tracker.set_property("ll-config-file",
                        "/opt/nvidia/deepstream/deepstream/samples/configs/deepstream-app/config_tracker_NvDCF_perf.yml")
    if preprocess.get_property('enable'):
      # Feed the tracker from the nvdspreprocess tensor meta of gie-id 1.
      tracker.set_property("input-tensor-meta", 1)
      tracker.set_property("tensor-meta-gie-id", 1)

    queue_tracker_to_multistreamtiler = Gst.ElementFactory.make("queue", "queue_tracker_to_multistreamtiler")

    multistreamtiler = Gst.ElementFactory.make("nvmultistreamtiler", "multistreamtiler")
    multistreamtiler.set_property("rows", 1)
    multistreamtiler.set_property("columns", 1)
    multistreamtiler.set_property("width", 1280)
    multistreamtiler.set_property("height", 720)

    queue_multistreamtiler_to_videoconvert = Gst.ElementFactory.make("queue", "queue_multistreamtiler_to_videoconvert")

    videoconvert = Gst.ElementFactory.make("nvvideoconvert", "videoconvert")

    queue_videoconvert_to_osd = Gst.ElementFactory.make("queue", "queue_videoconvert_to_osd")

    osd = Gst.ElementFactory.make("nvdsosd", "osd")

    queue_osd_to_eglglessink = Gst.ElementFactory.make("queue", "queue_osd_to_eglglessink")

    eglglessink = Gst.ElementFactory.make("nveglglessink", "eglglessink")
    eglglessink.set_property("sync", 1)

    # ====================================================
    # ADD ELEMENTS TO PIPELINE FROM LEFT TO RIGHT
    # ====================================================

    pipeline.add(source_bin)
    pipeline.add(streammux)
    pipeline.add(queue_streammux_to_preprocess)
    pipeline.add(preprocess)
    pipeline.add(queue_preprocess_to_pgie)
    pipeline.add(pgie)
    pipeline.add(queue_pgie_to_tracker)
    pipeline.add(tracker)
    pipeline.add(queue_tracker_to_multistreamtiler)
    pipeline.add(multistreamtiler)
    pipeline.add(queue_multistreamtiler_to_videoconvert)
    pipeline.add(videoconvert)
    pipeline.add(queue_videoconvert_to_osd)
    pipeline.add(osd)
    pipeline.add(queue_osd_to_eglglessink)
    pipeline.add(eglglessink)

    # ====================================================
    # LINK ELEMENTS FROM LEFT TO RIGHT
    # ====================================================

    # source to streammux
    source_bin_src_pad = source_bin.get_static_pad("src")
    streammux_sink_pad = streammux.request_pad_simple("sink_0")
    source_bin_src_pad.link(streammux_sink_pad)

    # streammux to preprocess
    streammux.link(queue_streammux_to_preprocess)
    queue_streammux_to_preprocess.link(preprocess)

    # preprocess to pgie
    preprocess.link(queue_preprocess_to_pgie)
    queue_preprocess_to_pgie.link(pgie)

    # pgie to tracker
    pgie.link(queue_pgie_to_tracker)
    queue_pgie_to_tracker.link(tracker)

    # tracker to multistreamtiler
    tracker.link(queue_tracker_to_multistreamtiler)
    queue_tracker_to_multistreamtiler.link(multistreamtiler)

    # multistreamtiler to videoconvert
    multistreamtiler.link(queue_multistreamtiler_to_videoconvert)
    queue_multistreamtiler_to_videoconvert.link(videoconvert)

    # videoconvert to osd
    videoconvert.link(queue_videoconvert_to_osd)
    queue_videoconvert_to_osd.link(osd)

    # osd to eglglessink.
    osd.link(queue_osd_to_eglglessink)
    queue_osd_to_eglglessink.link(eglglessink)

    loop = GLib.MainLoop()

    def bus_call(bus, message, loop):
        # Stop the main loop on end-of-stream or pipeline error; without
        # this watch the script would block in loop.run() forever.
        if message.type == Gst.MessageType.EOS:
            print("End-of-stream\n")
            loop.quit()
        elif message.type == Gst.MessageType.ERROR:
            err, debug = message.parse_error()
            sys.stderr.write("Error: %s: %s\n" % (err, debug))
            loop.quit()
        return True

    bus = pipeline.get_bus()
    bus.add_signal_watch()
    bus.connect("message", bus_call, loop)

    pipeline.set_state(Gst.State.PLAYING)
    try:
        loop.run()
    except KeyboardInterrupt:
        # Ctrl+C: fall through so the pipeline is still torn down cleanly.
        pass
    except Exception as e:
        print(e)

    pipeline.set_state(Gst.State.NULL)

Configuration file of nvdspreprocess-element:

[property]
enable=1
    # list of component gie-id for which tensor is prepared
target-unique-ids=1
    # 0=NCHW, 1=NHWC, 2=CUSTOM
network-input-order=0
    # 0=process on objects 1=process on frames
process-on-frame=1
    #uniquely identify the metadata generated by this element
unique-id=1
    # gpu-id to be used
gpu-id=0
    # if enabled maintain the aspect ratio while scaling
maintain-aspect-ratio=1
    # if enabled pad symmetrically with maintain-aspect-ratio enabled
symmetric-padding=1
    # processing width/height at which the image is scaled
processing-width=960
processing-height=544
    # max buffer in scaling buffer pool
scaling-buf-pool-size=6
    # max buffer in tensor buffer pool
tensor-buf-pool-size=6
    # tensor shape based on network-input-order
network-input-shape= 8;3;544;960
    # 0=RGB, 1=BGR, 2=GRAY
network-color-format=0
    # 0=FP32, 1=UINT8, 2=INT8, 3=UINT32, 4=INT32, 5=FP16
tensor-data-type=0
    # tensor name same as input layer name
tensor-name=input_1
    # 0=NVBUF_MEM_DEFAULT 1=NVBUF_MEM_CUDA_PINNED 2=NVBUF_MEM_CUDA_DEVICE 3=NVBUF_MEM_CUDA_UNIFIED
scaling-pool-memory-type=0
    # 0=NvBufSurfTransformCompute_Default 1=NvBufSurfTransformCompute_GPU 2=NvBufSurfTransformCompute_VIC
scaling-pool-compute-hw=0
    # Scaling Interpolation method
    # 0=NvBufSurfTransformInter_Nearest 1=NvBufSurfTransformInter_Bilinear 2=NvBufSurfTransformInter_Algo1
    # 3=NvBufSurfTransformInter_Algo2 4=NvBufSurfTransformInter_Algo3 5=NvBufSurfTransformInter_Algo4
    # 6=NvBufSurfTransformInter_Default
scaling-filter=0
    # custom library .so path having custom functionality
custom-lib-path=/opt/nvidia/deepstream/deepstream/lib/gst-plugins/libcustom2d_preprocess.so
    # custom tensor preparation function name having predefined input/outputs
    # check the default custom library nvdspreprocess_lib for more info
custom-tensor-preparation-function=CustomTensorPreparation

[user-configs]
   # Below parameters get used when using default custom library nvdspreprocess_lib
   # network scaling factor
pixel-normalization-factor=0.003921568
   # mean file path in ppm format
#mean-file=
   # array of offsets for each channel
#offsets=

[group-0]
src-ids=0
custom-input-transformation-function=CustomAsyncTransformation
process-on-roi=1
roi-params-src-0=200;200;640;640

Can you verify “input-tensor-meta=1” on nvtracker with: $ deepstream-app -c source4_1080p_dec_preprocess_infer-resnet_tracker_preprocess_sgie_tiled_display_int8.txt? The configuration file is located in the deepstream-app sample configs directory (e.g. /opt/nvidia/deepstream/deepstream/samples/configs/deepstream-app/).

Thank you for your reply.

Running

deepstream-app -c source4_1080p_dec_preprocess_infer-resnet_tracker_preprocess_sgie_tiled_display_int8.txt

within a deepstream:7.0-triton-multiarch docker environment on System 2 (Jetson) leads to the same exception, see the following output:

root@jetson-orin-nx:/opt/nvidia/deepstream/deepstream-7.0/samples/configs/deepstream-app-triton# deepstream-app -c source4_1080p_dec_preprocess_infer-resnet_tracker_preprocess_sgie_tiled_display_int8.txt
** INFO: <create_primary_gie_bin:144>: gpu-id: 0 in primary-gie group is ignored, only accept in nvinferserver's config
0:00:00.154458656   926 0xaaaad9b09320 WARN           nvinferserver gstnvinferserver_impl.cpp:366:validatePluginConfig:<secondary_gie_1> warning: Configuration file unique-id reset to: 5
0:00:00.154540352   926 0xaaaad9b09320 WARN           nvinferserver gstnvinferserver_impl.cpp:384:validatePluginConfig:<secondary_gie_1> warning: Configuration file process_mode reset to: PROCESS_MODE_CLIP_OBJECTS
INFO: TrtISBackend id:5 initialized model: Secondary_VehicleMake
0:00:00.209798336   926 0xaaaad9b09320 WARN           nvinferserver gstnvinferserver_impl.cpp:384:validatePluginConfig:<secondary_gie_0> warning: Configuration file process_mode reset to: PROCESS_MODE_CLIP_OBJECTS
INFO: TrtISBackend id:4 initialized model: Secondary_VehicleTypes
gstnvtracker: Loading low-level lib at /opt/nvidia/deepstream/deepstream/lib/libnvds_nvmultiobjecttracker.so
[NvMultiObjectTracker] Initialized
gstnvtracker: Forcing format RGBA for tracker 
0:00:00.317673696   926 0xaaaad9b09320 WARN           nvinferserver gstnvinferserver_impl.cpp:360:validatePluginConfig:<primary_gie> warning: Configuration file batch-size reset to: 4
0:00:00.317734368   926 0xaaaad9b09320 WARN           nvinferserver gstnvinferserver_impl.cpp:384:validatePluginConfig:<primary_gie> warning: Configuration file process_mode reset to: PROCESS_MODE_FULL_FRAME
INFO: TrtISBackend id:1 initialized model: Primary_Detector

Runtime commands:
	h: Print this help
	q: Quit

	p: Pause
	r: Resume

NOTE: To expand a source in the 2D tiled display and view object details, left-click on the source.
      To go back to the tiled display, right-click anywhere on the window.

** INFO: <bus_callback:291>: Pipeline ready

WARNING from secondary_gie_1: Configuration file unique-id reset to: 5
Debug info: /dvs/git/dirty/git-master_linux/deepstream/sdk/src/gst-plugins/gst-nvinferserver/gstnvinferserver_impl.cpp(366): validatePluginConfig (): /GstPipeline:pipeline/GstBin:secondary_gie_bin/GstNvInferServer:secondary_gie_1
WARNING from secondary_gie_1: Configuration file process_mode reset to: PROCESS_MODE_CLIP_OBJECTS
Debug info: /dvs/git/dirty/git-master_linux/deepstream/sdk/src/gst-plugins/gst-nvinferserver/gstnvinferserver_impl.cpp(384): validatePluginConfig (): /GstPipeline:pipeline/GstBin:secondary_gie_bin/GstNvInferServer:secondary_gie_1
WARNING from secondary_gie_0: Configuration file process_mode reset to: PROCESS_MODE_CLIP_OBJECTS
Debug info: /dvs/git/dirty/git-master_linux/deepstream/sdk/src/gst-plugins/gst-nvinferserver/gstnvinferserver_impl.cpp(384): validatePluginConfig (): /GstPipeline:pipeline/GstBin:secondary_gie_bin/GstNvInferServer:secondary_gie_0
WARNING from primary_gie: Configuration file batch-size reset to: 4
Debug info: /dvs/git/dirty/git-master_linux/deepstream/sdk/src/gst-plugins/gst-nvinferserver/gstnvinferserver_impl.cpp(360): validatePluginConfig (): /GstPipeline:pipeline/GstBin:primary_gie_bin/GstNvInferServer:primary_gie
WARNING from primary_gie: Configuration file process_mode reset to: PROCESS_MODE_FULL_FRAME
Debug info: /dvs/git/dirty/git-master_linux/deepstream/sdk/src/gst-plugins/gst-nvinferserver/gstnvinferserver_impl.cpp(384): validatePluginConfig (): /GstPipeline:pipeline/GstBin:primary_gie_bin/GstNvInferServer:primary_gie
WARNING from src_elem: No decoder available for type 'audio/mpeg, mpegversion=(int)4, framed=(boolean)true, stream-format=(string)raw, level=(string)2, base-profile=(string)lc, profile=(string)lc, codec_data=(buffer)119056e500, rate=(int)48000, channels=(int)2'.
Debug info: ../gst/playback/gsturidecodebin.c(960): unknown_type_cb (): /GstPipeline:pipeline/GstBin:multi_src_bin/GstBin:src_sub_bin0/GstURIDecodeBin:src_elem
WARNING from src_elem: No decoder available for type 'audio/mpeg, mpegversion=(int)4, framed=(boolean)true, stream-format=(string)raw, level=(string)2, base-profile=(string)lc, profile=(string)lc, codec_data=(buffer)119056e500, rate=(int)48000, channels=(int)2'.
Debug info: ../gst/playback/gsturidecodebin.c(960): unknown_type_cb (): /GstPipeline:pipeline/GstBin:multi_src_bin/GstBin:src_sub_bin1/GstURIDecodeBin:src_elem
WARNING from src_elem: No decoder available for type 'audio/mpeg, mpegversion=(int)4, framed=(boolean)true, stream-format=(string)raw, level=(string)2, base-profile=(string)lc, profile=(string)lc, codec_data=(buffer)119056e500, rate=(int)48000, channels=(int)2'.
Debug info: ../gst/playback/gsturidecodebin.c(960): unknown_type_cb (): /GstPipeline:pipeline/GstBin:multi_src_bin/GstBin:src_sub_bin3/GstURIDecodeBin:src_elem
WARNING from src_elem: No decoder available for type 'audio/mpeg, mpegversion=(int)4, framed=(boolean)true, stream-format=(string)raw, level=(string)2, base-profile=(string)lc, profile=(string)lc, codec_data=(buffer)119056e500, rate=(int)48000, channels=(int)2'.
Debug info: ../gst/playback/gsturidecodebin.c(960): unknown_type_cb (): /GstPipeline:pipeline/GstBin:multi_src_bin/GstBin:src_sub_bin2/GstURIDecodeBin:src_elem
/bin/bash: line 1: lsmod: command not found
/bin/bash: line 1: modprobe: command not found
Opening in BLOCKING MODE 
Opening in BLOCKING MODE 
Opening in BLOCKING MODE 
Opening in BLOCKING MODE 
NvMMLiteOpen : Block : BlockType = 261 
NvMMLiteOpen : Block : BlockType = 261 
NvMMLiteOpen : Block : BlockType = 261 
NvMMLiteOpen : Block : BlockType = 261 
NvMMLiteBlockCreate : Block : BlockType = 261 
NvMMLiteBlockCreate : Block : BlockType = 261 
NvMMLiteBlockCreate : Block : BlockType = 261 
NvMMLiteBlockCreate : Block : BlockType = 261 
** INFO: <bus_callback:277>: Pipeline running


!![Exception] [NvMOTContext::processFrame()] motFrame->bufferList[i]->colorFormat != m_Config.perTransformBatchConfig[i].colorFormat
An exception occurred. [NvMOTContext::processFrame()] motFrame->bufferList[i]->colorFormat != m_Config.perTransformBatchConfig[i].colorFormat
gstnvtracker: Low-level tracker lib returned error 1
0:00:00.938009792   926 0xfffe9c006550 WARN           nvinferserver gstnvinferserver.cpp:564:gst_nvinfer_server_push_buffer:<primary_gie> error: Internal data stream error.
ERROR from tracking_tracker: Failed to submit input to tracker
Debug info: /dvs/git/dirty/git-master_linux/deepstream/sdk/src/gst-plugins/gst-nvtracker2/gstnvtracker.cpp(792): gst_nv_tracker_submit_input_buffer (): /GstPipeline:pipeline/GstBin:tracking_bin/GstNvTracker:tracking_tracker
0:00:00.938057120   926 0xfffe9c006550 WARN           nvinferserver gstnvinferserver.cpp:564:gst_nvinfer_server_push_buffer:<primary_gie> error: streaming stopped, reason error (-5)
ERROR from primary_gie: Internal data stream error.
Debug info: /dvs/git/dirty/git-master_linux/deepstream/sdk/src/gst-plugins/gst-nvinferserver/gstnvinferserver.cpp(564): gst_nvinfer_server_push_buffer (): /GstPipeline:pipeline/GstBin:primary_gie_bin/GstNvInferServer:primary_gie:
streaming stopped, reason error (-5)
ERROR from tracking_tracker: Failed to submit input to tracker
Debug info: /dvs/git/dirty/git-master_linux/deepstream/sdk/src/gst-plugins/gst-nvtracker2/gstnvtracker.cpp(792): gst_nv_tracker_submit_input_buffer (): /GstPipeline:pipeline/GstBin:tracking_bin/GstNvTracker:tracking_tracker
Quitting
nvstreammux: Successfully handled EOS for source_id=0
nvstreammux: Successfully handled EOS for source_id=1
nvstreammux: Successfully handled EOS for source_id=2
nvstreammux: Successfully handled EOS for source_id=3
ERROR from tracking_tracker: Failed to submit input to tracker
Debug info: /dvs/git/dirty/git-master_linux/deepstream/sdk/src/gst-plugins/gst-nvtracker2/gstnvtracker.cpp(792): gst_nv_tracker_submit_input_buffer (): /GstPipeline:pipeline/GstBin:tracking_bin/GstNvTracker:tracking_tracker
ERROR from tracking_tracker: Failed to submit input to tracker
Debug info: /dvs/git/dirty/git-master_linux/deepstream/sdk/src/gst-plugins/gst-nvtracker2/gstnvtracker.cpp(792): gst_nv_tracker_submit_input_buffer (): /GstPipeline:pipeline/GstBin:tracking_bin/GstNvTracker:tracking_tracker
[NvMultiObjectTracker] De-initialized
App run failed

I can again confirm that when the “input-tensor-meta”-property of the nvtracker-element is set to 0 in the configuration file source4_1080p_dec_preprocess_infer-resnet_tracker_preprocess_sgie_tiled_display_int8.txt, the exception is not thrown.

Thanks for reporting the issue. We will fix the issue in next release. Stay tuned.

This topic was automatically closed 14 days after the last reply. New replies are no longer allowed.