# Triton inference configuration for DeepStream (Gst-nvinferserver,
# protobuf text format — not YAML despite the file extension).
# Model: yolov5_trt

infer_config {
  unique_id: 1
  max_batch_size: 1

  backend {
    # Triton Inference Server backend serving the TensorRT-built yolov5 model
    trt_is {
      model_name: "yolov5"
      version: 1
      model_repo {
        root: "../../models"
        log_level: 2
        strict_model_config: false
        # NOTE(review): the tf_* options configure TensorFlow backends;
        # presumably inert for a TensorRT model — confirm before relying on them.
        tf_gpu_memory_fraction: 0.4
        tf_disable_soft_placement: 0
      }
    }
  }

  preprocess {
    network_format: IMAGE_FORMAT_RGB
    tensor_order: TENSOR_ORDER_LINEAR
    frame_scaling_hw: FRAME_SCALING_HW_DEFAULT
    maintain_aspect_ratio: 0
    normalize {
      # 1/255 = 0.003921569 — scales 8-bit pixel values into [0, 1]
      scale_factor: 0.003921569
      channel_offsets: [0, 0, 0]
    }
  }

  # Mandatory to specify the kind of post-processing; "other" leaves output
  # tensor parsing to downstream code (tensors are attached as metadata below).
  postprocess {
    other {}
  }

  # YOLOv5 custom library for the Triton backend
  custom_lib {
    path: "/opt/nvidia/deepstream/deepstream/code/lib/yolov5_trt/x64/yolov5.so"
  }
}

input_control {
  # Run inference on full frames (primary detector mode), every frame
  process_mode: PROCESS_MODE_FULL_FRAME
  interval: 0
}

# Attach raw output tensors as metadata so the app can post-process them
output_control {
  output_tensor_meta: true
}