Hello everyone.
I decoded an H.265 10-bit file and forced it to output in NV12 format. I added the code in the function “int NvDecoder::HandleVideoSequence(CUVIDEOFORMAT *pVideoFormat)” before creating the video decoder, like this:
if (1)
{
//videoDecodeCreateInfo.bitDepthMinus8=0; //10 bit to 8bit
videoDecodeCreateInfo.OutputFormat = cudaVideoSurfaceFormat_NV12;
m_eOutputFormat = cudaVideoSurfaceFormat_NV12;
m_videoFormat.bit_depth_chroma_minus8 = 0;
m_videoFormat.bit_depth_luma_minus8 = 0;
m_nBPP = 1;
m_nBitDepthMinus8 = 0;
}
CUDA_DRVAPI_CALL(cuCtxPushCurrent(m_cuContext));
NVDEC_API_CALL(cuvidCreateDecoder(&m_hDecoder, &videoDecodeCreateInfo));
CUDA_DRVAPI_CALL(cuCtxPopCurrent(NULL));
STOP_TIMER("Session Initialization Time: ");
and the output image is good.
But when I decode an 8-bit H.265 file and want to force it to output as cudaVideoSurfaceFormat_P016, I add code like this:
if (1)
{
//videoDecodeCreateInfo.bitDepthMinus8=0; //8bit to 10bit
videoDecodeCreateInfo.OutputFormat = cudaVideoSurfaceFormat_P016;
videoDecodeCreateInfo.bitDepthMinus8 = 2;
m_eOutputFormat = cudaVideoSurfaceFormat_P016;
m_videoFormat.bit_depth_chroma_minus8 = 2;
m_videoFormat.bit_depth_luma_minus8 = 2;
m_nBPP = 2;
m_nBitDepthMinus8 = 2;
}
CUDA_DRVAPI_CALL(cuCtxPushCurrent(m_cuContext));
NVDEC_API_CALL(cuvidCreateDecoder(&m_hDecoder, &videoDecodeCreateInfo));
CUDA_DRVAPI_CALL(cuCtxPopCurrent(NULL));
STOP_TIMER("Session Initialization Time: ");
and the output image is bad.
Maybe someone has faced the same issue and knows how to handle it?
Appreciate any help. Thanks.