Please provide complete information as applicable to your setup.
- Hardware:Jetson Orin Nano 8GB
- JetPack Version:6.2 [L4T 36.4.3]
- AI NVR Version:2.0.1
- VST Version:1.3.0-24.07.1
- DeepStream Version:7.1
I just followed the quick start guide, and when I try to edit ds-config_nano.yaml or compose_nano.yaml, the deepstream container shows Restarting (0).
################################################################################
# SPDX-FileCopyrightText: Copyright (c) 2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
# SPDX-License-Identifier: MIT
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
################################################################################
# DeepStream service-maker pipeline definition (ds-config_nano.yaml).
# `nodes` declares each pipeline element; `edges` wires them together.
deepstream:
  nodes:
    # name of the primary inference must be 'pgie' for test app to route streams here
    - type: nvinfer
      name: pgie
      properties:
        config-file-path: "/ds-config-files/pn26/config_infer_primary_RN34_PN26_960x544_orin_unprune_nano.txt"
        model-engine-file: "/pn26-files/pn26_jp6_halfmem_bs4_gpu.engine"
        unique-id: 1
        # be sure to rename model-engine-file whenever batch-size is changed
        batch-size: 4
    - type: nvtracker
      name: tracker
      properties:
        # NOTE(review): the original pointed at the versioned "deepstream-7.0"
        # directory, which does not exist in the DeepStream 7.1 container and
        # would crash the app at startup. Use the version-agnostic "deepstream"
        # symlink (same convention as ll-lib-file below) — verify the file is
        # present in the image.
        ll-config-file: "/opt/nvidia/deepstream/deepstream/samples/configs/deepstream-app/config_tracker_NvDCF_perf.yml"
        ll-lib-file: "/opt/nvidia/deepstream/deepstream/lib/libnvds_nvmultiobjecttracker.so"
        tracker-width: 960
        tracker-height: 544
    - type: nvmsgconv
      name: msgconv
      properties:
        payload-type: 1
    - type: nvmsgbroker
      name: msgbroker
      properties:
        config: "/ds-config-files/pn26/cfg_redis.txt"
        proto-lib: "/opt/nvidia/deepstream/deepstream/lib/libnvds_redis_proto.so"
        # Redis connection string: host;port;topic
        conn-str: "localhost;6379;test"
        topic: "test"
        sync: false
        async: false
    - type: queue
      name: checkpoint
    - type: identity
      name: tiler
      properties:
        width: 1280
        height: 720
    - type: nvdsosd
      name: osd
    - type: nvvideoconvert
      name: converter
    - type: tee
      name: tee
    - type: queue
      name: queue1
    - type: queue
      name: queue2
    - type: nvvideoconvert
      name: converter1
    - type: nvrtspoutsinkbin
      name: sink
      properties:
        rtsp-port: 8555
        enc-type: 1
        sync: false
    - type: sample_video_probe.sample_video_probe
      name: osd_counter
      properties:
        font-size: 15
  # Pipeline graph: pgie fans out to tracker + osd_counter; after the OSD a tee
  # splits into the RTSP sink branch (queue1) and the message broker branch (queue2).
  edges:
    pgie: [tracker, osd_counter]
    tracker: checkpoint
    checkpoint: tiler
    tiler: converter
    converter: osd
    osd: tee
    tee: [queue1, queue2]
    queue1: converter1
    converter1: sink
    queue2: msgconv
    msgconv: msgbroker
# compose_nano.yaml — AI NVR services layered on top of base_compose.yaml.
include:
  - base_compose.yaml
services:
  deepstream:
    image: nvcr.io/nvidia/jps/deepstream:7.1-public-v1
    user: "2006:150"
    network_mode: "host"
    logging:
      driver: "json-file"
      options:
        max-size: "8192m"
        max-file: "3"
    container_name: deepstream
    runtime: nvidia
    volumes:
      - ./config/deepstream:/ds-config-files
      - /tmp/.X11-unix/:/tmp/.X11-unix
      - /data/logging-volume:/log
      - /tmp:/tmp
    depends_on:
      moj-init-ds:
        condition: service_completed_successfully
    deploy:
      resources:
        limits:
          memory: 5600M
      restart_policy:
        condition: always
    # Legacy config-file entry point, kept for reference:
    #command: sh -c 'deepstream-test5-app -c /ds-config-files/pn26/ds-config_nano.txt 2>&1 | grep --line-buffered . | tee -a /log/deepstream.log'
    command: sh -c '/opt/nvidia/deepstream/deepstream/service-maker/sources/apps/cpp/deepstream_test5_app/build/deepstream-test5-app -s /ds-config-files/pn26/service-maker/source-list_nano.yaml -c /ds-config-files/pn26/service-maker/ds-config_nano.yaml -l /ds-config-files/pn26/labels.txt --perf-measurement-interval-sec 5 2>&1 | grep --line-buffered . | tee -a /log/deepstream.log'

  # Sensor Distribution & Routing: watches VST stream events on Redis and
  # adds/removes streams on the deepstream container.
  sdr:
    image: nvcr.io/nvidia/jps/sdr:2.2-8-14-v1
    user: "2001:150"
    network_mode: "host"
    logging:
      driver: "json-file"
      options:
        max-size: "8192m"
        max-file: "3"
    container_name: sdr
    volumes:
      - ./config/sdr:/wdm-configs
      - /data/emdx-volume:/wdm-data
      - /var/run/docker.sock:/var/run/docker.sock
      - /data/logging-volume:/log
    # All values quoted: Compose environment entries are strings; unquoted
    # booleans/numbers would be retyped by the YAML parser before delivery.
    environment:
      PORT: "4001"
      WDM_WL_SPEC: /wdm-data/ds-data_wl.yaml
      WDM_CLUSTER_CONFIG_FILE: /wdm-configs/docker_cluster_config_single_pipeline.json
      WDM_MSG_KEY: vst.event.modified
      WDM_WL_REDIS_MSG_FIELD: metadata
      WDM_WL_ADD_URL: /api/v1/stream/add
      WDM_WL_DELETE_URL: /api/v1/stream/remove
      WDM_WL_HEALTH_CHECK_URL: /api/v1/stream/add
      WDM_WL_CHANGE_ID_ADD: camera_streaming
      WDM_PRELOAD_WORKLOAD: ./tests/event_pre-roll.json
      WDM_CLEAR_DATA_WL: "true"
      WDM_KFK_ENABLE: "false"
      WDM_DS_SWAP_ID_NAME: "true"
      WDM_VALIDATE_BEFORE_ADD: "true"
      WDM_PRELOAD_DELAY_FOR_DS_API: "true"
      WDM_WL_THRESHOLD: "4"
      WDM_CLUSTER_TYPE: docker
      WDM_POD_WATCH_DOCKER_DELAY: "0.5"
      WDM_DS_STATUS_CHECK: "true"
      WDM_RESTART_DS_ON_ADD_FAIL: "false"
      WDM_DISABLE_WERKZEUG_LOGGING: "true"
      WDM_WL_OBJECT_NAME: sdr-deepstream
      WDM_CONSUMER_GRP_ID: sdr-deepstream-cg
      WDM_CLUSTER_CONTAINER_NAMES: '["deepstream", "vst"]'
      # NOTE(review): likely ".*deepstream.*" originally — "*" characters are
      # commonly eaten by markdown copy/paste; verify against the shipped config.
      WDM_WL_NAME_IGNORE_REGEX: ".deepstream."
      VST_STREAMS_ENDPOINT: http://localhost:30000/api/v1/live/streams
      VST_STATUS_ENDPOINT: http://localhost:30000/api/v1/sensor/status
    depends_on:
      moj-http-based-init-sdr:
        condition: service_completed_successfully
    deploy:
      resources:
        limits:
          memory: 300M
      restart_policy:
        condition: always
    # Explicitly clear the image entrypoint so `command` below is the process
    # that runs (a bare "entrypoint:" is null, i.e. no override).
    entrypoint: ""
    command: sh -c '/wdm/dist/sdr 2>&1 | tee -a /log/sdr-deepstream.log'

  sdr-reprovision-controller:
    image: nvcr.io/nvidia/jps/sdr-reprovision-controller:2.2-8-14-v1
    user: "2001:150"
    network_mode: "host"
    logging:
      driver: "json-file"
      options:
        max-size: "8192m"
        max-file: "3"
    volumes:
      - ./config/sdr:/opt/config
    environment:
      VST_ENDPOINT: "http://localhost:30000/api/v1/live/streams"
      REDIS_IP: "localhost"
      REDIS_PORT: "6379"
      REDIS_CHANNEL: "vst.event"
      SDR_PORT: "4001"
      SDR_CLUSTER_CONFIG_FILE: "/opt/config/docker_cluster_config_single_pipeline.json"
      WL_CHANGE_ID_ADD: "camera_streaming"
      WL_CHANGE_ID_DEL: "camera_remove"
      SWAP_ID_NAME_VST: "true"
      ENABLE_VST_RECONCILE: "true"
    container_name: sdr-reprovision-controller
    depends_on:
      moj-http-based-init-sdr-reprovision-controller:
        condition: service_completed_successfully
    deploy:
      resources:
        limits:
          memory: 100M
      restart_policy:
        condition: always

  sdr-alertmanager-controller:
    image: nvcr.io/nvidia/jps/sdr-alertmanager-controller:2.3.1
    user: "2001:150"
    network_mode: "host"
    logging:
      driver: "json-file"
      options:
        max-size: "8192m"
        max-file: "3"
    volumes:
      - ./config/sdr:/root/its_monitoring/config
    environment:
      VST_ENDPOINT: "http://localhost:30000/api/v1/live/streams"
      REDIS_IP: "localhost"
      REDIS_PORT: "6379"
      REDIS_CHANNEL: "vst.event.modified"
      SDR_PORT: "4001"
      SEND_ADDITIONAL_ALERT_THRESHOLD: "0"
    container_name: sdr-alertmanager-controller
    depends_on:
      moj-http-based-init-sdr-reprovision-controller:
        condition: service_completed_successfully
    deploy:
      resources:
        limits:
          memory: 100M
      restart_policy:
        condition: always

  sdr-proxy-controller:
    image: nvcr.io/nvidia/jps/sdr-proxy-controller:2.2-8-14-v1
    user: "2001:150"
    network_mode: "host"
    logging:
      driver: "json-file"
      options:
        max-size: "8192m"
        max-file: "3"
    volumes:
      - ./config/sdr:/root/its_monitoring/config
    environment:
      ENABLE_IPC_SUPPORT: "true"
      REDIS_IP: "localhost"
      REDIS_PORT: "6379"
      SDR_PORT: "4001"
    container_name: sdr-proxy-controller
    depends_on:
      moj-http-based-init-sdr-reprovision-controller:
        condition: service_completed_successfully
    deploy:
      resources:
        limits:
          memory: 100M
      restart_policy:
        condition: always




