How can I save buffer data from nvdsinfer?

Hi,

I encountered an inference error using DeepStream, so I am debugging the nvinfer plugin.
When I read the nvdsinfer source code, I found a void pointer array being passed to the enqueueV2 function (code attached below):

m_Context->enqueueV2(bindingBuffers.data(), stream,
            (consumeEvent ? &consumeEvent->ptr() : nullptr))

I think this should be the data that is finally sent to TensorRT for inference, so I want to dump it to a text file to see what it looks like, but I don't know how to do this. Could anyone give me some hints?

NvDsInferStatus
FullDimTrtBackendContext::enqueueBuffer(
    const std::shared_ptr<InferBatchBuffer>& buffer, CudaStream& stream,
    CudaEvent* consumeEvent)
{
    assert(m_Context);
    assert(stream.ptr());

    std::vector<void*> bindingBuffers = buffer->getDeviceBuffers();

    for (int iL = 0; iL < (int)m_AllLayers.size(); ++iL)
    {
        if (!m_AllLayers[iL].isInput)
            continue;

        NvDsInferBatchDims batchDims = buffer->getBatchDims(iL);
        assert(batchDims.batchSize == buffer->getBatchDims(0).batchSize);
        //fix for onnx model which has fixed input dimensions
        if(batchDims.batchSize < m_AllLayers[iL].profileDims[kSELECTOR_MIN].batchSize)
            batchDims.batchSize  = m_AllLayers[iL].profileDims[kSELECTOR_MIN].batchSize;
        if (!canSupportBatchDims(iL, batchDims))
        {
            dsInferError(
                "Failed to enqueue buffer in fulldims mode because "
                "binding idx: %d with batchDims: %s is not supported ",
                iL, safeStr(batchDims2Str(batchDims)));
            return NVDSINFER_INVALID_PARAMS;
        }

        nvinfer1::Dims dimsWBatch =
            CombineDimsBatch(batchDims.dims, batchDims.batchSize);
        nvinfer1::Dims lastDimsBatch = m_Context->getBindingDimensions(iL);
        if (dimsWBatch != lastDimsBatch)
        {
            if (!m_Context->setBindingDimensions(iL, dimsWBatch))
            {
                dsInferError(
                    "Failed to enqueue buffer when setting bindings idx:%d with "
                    "dims:%s",
                    iL, safeStr(dims2Str(dimsWBatch)));
                return NVDSINFER_INVALID_PARAMS;
            }
        }
    }

    if (!m_Context->allInputDimensionsSpecified())
    {
        dsInferError(
            "Failed to enqueue buffer because context dims are not specified "
            "in dynamic mode");
        return NVDSINFER_TENSORRT_ERROR;
    }
    if (!m_Context->enqueueV2(bindingBuffers.data(), stream,
            (consumeEvent ? &consumeEvent->ptr() : nullptr)))
    {
        dsInferError("Failed to enqueue trt inference batch");
        return NVDSINFER_TENSORRT_ERROR;
    }

    return NVDSINFER_SUCCESS;
}

Here is my full system configuration:
Hardware Platform (Jetson / GPU) : Xavier NX
DeepStream Version : 5.0
JetPack Version (valid for Jetson only) : 4.4
TensorRT Version : 7.1.3

Please refer to the DeepStream SDK FAQ (Intelligent Video Analytics / DeepStream SDK - NVIDIA Developer Forums), section "2. [DS5.0GA_Jetson_GPU_Plugin] Dump the Inference Input".
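
For reference, here is a minimal sketch of that approach: copy an input binding from device to host with cudaMemcpy and write the values to a text file right before the enqueueV2 call in enqueueBuffer. The helper name dumpDeviceBuffer, the output path, and the assumption that the binding holds FP32 data (with numElements computed elsewhere from the binding dimensions) are all illustrative, not the SDK's API.

#include <cuda_runtime_api.h>
#include <fstream>
#include <vector>

// Hypothetical debug helper: copy a device buffer to the host and
// write one value per line to a text file. Assumes FP32 data and
// that numElements matches the binding's dimensions.
static void dumpDeviceBuffer(const void* devPtr, size_t numElements,
                             const char* path)
{
    // Make sure any pending work that fills the buffer has finished.
    cudaDeviceSynchronize();

    std::vector<float> host(numElements);
    cudaMemcpy(host.data(), devPtr, numElements * sizeof(float),
               cudaMemcpyDeviceToHost);

    std::ofstream out(path);
    for (size_t i = 0; i < numElements; ++i)
        out << host[i] << "\n";
}

Call it just before m_Context->enqueueV2(...), e.g. dumpDeviceBuffer(bindingBuffers[0], numElements, "/tmp/infer_input.txt"), then rebuild the nvdsinfer library and replace the installed copy so the plugin picks up the change. A synchronous cudaMemcpy is slow, but that is fine for one-off debugging.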

Thanks, it works.
