I’m trying to create an ID3D11VideoDecoder for decoding the HEVC Main10 profile with the Direct3D 11 video API.
The problem I’m facing is the following:
- I'm calling ID3D11VideoDevice::CheckVideoDecoderFormat to see if DXGI_FORMAT_NV12 is supported - this function succeeds and returns TRUE (format supported)
- In the next step I call ID3D11VideoDevice::GetVideoDecoderConfigCount to get the available configs. I set the correct profile GUID, DXGI_FORMAT_NV12 as the OutputFormat and a resolution of 1920x1088
- ID3D11VideoDevice::GetVideoDecoderConfigCount succeeds but returns a config count of 0
I have also tried to loop through resolutions starting from 1x1 to 2000x2000 - none of them worked.
I started experimenting a bit and noticed that when I use DXGI_FORMAT_P010 or DXGI_FORMAT_420_OPAQUE as the OutputFormat, I receive a config count > 0. However, in my application I need NV12 as the OutputFormat. To me this looks like a driver bug.
I have attached a little code snippet that illustrates the problem:
#include <atlbase.h>
#include <d3d11_1.h>
#include <tchar.h>
#pragma comment(lib, "dxgi.lib")
#pragma comment(lib, "d3d11.lib")
#pragma comment(lib, "dxguid.lib")
// Repro: ID3D11VideoDevice::CheckVideoDecoderFormat reports NV12 as supported
// for HEVC Main10, yet GetVideoDecoderConfigCount returns 0 configs for NV12.
// P010 (the natural 10-bit surface format) returns >= 1 configs.
int _tmain(int argc, _TCHAR* argv[])
{
    CComPtr<ID3D11Device> device;
    CComPtr<ID3D11DeviceContext> context;
    const D3D_FEATURE_LEVEL feature_levels[] =
    {
        D3D_FEATURE_LEVEL_11_1,
        D3D_FEATURE_LEVEL_11_0
    };
    // VIDEO_SUPPORT flag is required to obtain ID3D11VideoDevice later.
    HRESULT hr = D3D11CreateDevice(NULL, D3D_DRIVER_TYPE_HARDWARE, NULL,
        D3D11_CREATE_DEVICE_VIDEO_SUPPORT,
        feature_levels, ARRAYSIZE(feature_levels), D3D11_SDK_VERSION,
        &device, NULL, &context);
    if (FAILED(hr))
    {
        _tprintf(_T("D3D11CreateDevice failed: %08X\n"), hr);
        return -1;
    }

    CComPtr<ID3D11VideoDevice> video_device;
    hr = device.QueryInterface(&video_device);
    if (FAILED(hr))
    {
        _tprintf(_T("QueryInterface for video device failed: %08X\n"), hr);
        return -1;
    }

    BOOL supported = FALSE;
    hr = video_device->CheckVideoDecoderFormat(&D3D11_DECODER_PROFILE_HEVC_VLD_MAIN10,
        DXGI_FORMAT_NV12, &supported);
    if (FAILED(hr) || !supported)
    {
        // Make the negative path visible instead of exiting silently.
        _tprintf(_T("CheckVideoDecoderFormat: hr=%08X, supported=%d\n"), hr, supported);
        return 0;
    }

    // Zero-initialize: if the SDK ever grows this struct, uninitialized
    // members could make the driver reject the description for the wrong
    // reason and invalidate the repro.
    D3D11_VIDEO_DECODER_DESC decoder_desc = {};
    decoder_desc.Guid = D3D11_DECODER_PROFILE_HEVC_VLD_MAIN10;
    decoder_desc.OutputFormat = DXGI_FORMAT_NV12;
    decoder_desc.SampleWidth = 1920;
    decoder_desc.SampleHeight = 1088;   // coded height (16-aligned 1080)

    // Succeeds but reports config_count = 0 for NV12 — the bug under report.
    UINT config_count = 0;
    hr = video_device->GetVideoDecoderConfigCount(&decoder_desc, &config_count);
    _tprintf(_T("NV12: hr=%08X, config_count=%u\n"), hr, config_count);

    // Succeeds and reports config_count >= 1 for P010.
    decoder_desc.OutputFormat = DXGI_FORMAT_P010;
    hr = video_device->GetVideoDecoderConfigCount(&decoder_desc, &config_count);
    _tprintf(_T("P010: hr=%08X, config_count=%u\n"), hr, config_count);

    return 0;
}
My system specs:
Windows 10 Enterprise 64-bit (10.0, Build 14393) (14393.rs1_release.161220-1747)
NVIDIA GeForce GTX 960, 4 GB
Driver: 378.66