[application]
enable-perf-measurement=1
perf-measurement-interval-sec=1
#gie-kitti-output-dir=streamscl

[tiled-display]
enable=1
rows=4
columns=1
width=1920
height=1080
gpu-id=0
#(0): nvbuf-mem-default - Default memory allocated, specific to particular platform
#(1): nvbuf-mem-cuda-pinned - Allocate Pinned/Host cuda memory, applicable for Tesla
#(2): nvbuf-mem-cuda-device - Allocate Device cuda memory, applicable for Tesla
#(3): nvbuf-mem-cuda-unified - Allocate Unified cuda memory, applicable for Tesla
#(4): nvbuf-mem-surface-array - Allocate Surface Array memory, applicable for Jetson
nvbuf-memory-type=0

[source0]
enable=1
#Type - 1=CameraV4L2 2=URI 3=MultiURI
type=3
uri=file:/opt/nvidia/deepstream/deepstream/samples/streams/sample_1080p_h264.mp4
#uri=rtmp://10.1.130.200:11935/hls/32010500001320000007_32010500001320000007
num-sources=4
drop-frame-interval=0
gpu-id=0
# (0): memtype_device - Memory type Device
# (1): memtype_pinned - Memory type Host Pinned
# (2): memtype_unified - Memory type Unified
cudadec-memtype=0

[sink0]
enable=1
#Type - 1=FakeSink 2=EglSink 3=File
type=3
sync=0
qos=1
#source-id=0
gpu-id=0
enc-type=0
nvbuf-memory-type=0
#1=mp4 2=mkv
container=1
#1=h264 2=h265
codec=1
rtsp-port=8554
output-file=/opt/nvidia/deepstream/deepstream/samples/yolov4.mp4

[osd]
enable=1
gpu-id=0
border-width=1
text-size=12
text-color=1;1;1;1;
text-bg-color=0.3;0.3;0.3;1
font=Serif
show-clock=0
clock-x-offset=800
clock-y-offset=820
clock-text-size=12
clock-color=1;0;0;0
nvbuf-memory-type=0

[streammux]
gpu-id=0
##Boolean property to inform muxer that sources are live
live-source=0
batch-size=4
##time out in usec, to wait after the first buffer is available
##to push the batch even if the complete batch is not formed
batched-push-timeout=40000
## Set muxer output width and height
width=1920
height=1080
##Enable to maintain aspect ratio wrt source, and allow black borders, works
##along with width, height properties
enable-padding=0
nvbuf-memory-type=0

# config-file property is mandatory for any gie section.
# Other properties are optional and if set will override the properties set in
# the infer config file.
[primary-gie]
enable=1
gpu-id=0
#(0): nvinfer - Default inference plugin based on Tensorrt
#(1): nvinferserver - inference plugin based on Tensorrt-Inference-Server
plugin-type=1
batch-size=4
#Required by the app for OSD, not a plugin property
bbox-border-color0=1;0;0;1
bbox-border-color1=0;1;1;1
bbox-border-color2=0;0;1;1
bbox-border-color3=0;1;0;1
interval=0
gie-unique-id=1
nvbuf-memory-type=0
config-file=config_infer_primary_detector_yolov5.txt

[tracker]
enable=1
tracker-width=1920
tracker-height=1080
enable-batch-process=1
enable-past-frame=1
ll-config-file=/opt/nvidia/deepstream/deepstream/samples/configs/deepstream-app-trtis/tracker_config.yml
#ll-lib-file=/opt/nvidia/deepstream/deepstream/lib/libnvds_mot_klt.so
#ll-lib-file=/opt/nvidia/deepstream/deepstream/lib/libnvds_nvdcf.so
#ll-lib-file=/opt/nvidia/deepstream/deepstream/lib/libnvds_mot_iou.so
ll-lib-file=/opt/nvidia/deepstream/deepstream-6.0/sources/gst-plugins/gst-tracker/libnvds_customtracker.so
gpu-id=0
iou-threshold=0.3

[tests]
file-loop=0
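
# Usage note (a sketch, not part of the original file): this config is in the
# standard deepstream-app format, so it would typically be launched with the
# reference application, e.g.
#   deepstream-app -c <path-to-this-config-file>
# The <path-to-this-config-file> placeholder is an assumption; use whatever
# filename this config is saved under. Because plugin-type=1 selects the
# nvinferserver (Triton) inference plugin, the referenced
# config_infer_primary_detector_yolov5.txt and the model it points to must be
# present and resolvable relative to where deepstream-app is run.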