Hello experts, I want to implement video decode with a buffer queue, just like the encoder in sample/frontend.
But I always get an error while the decoder is initializing: dqEvent() never returns successfully.
Can you please have a look? Thanks a lot. Here is the log from decoder initialization:
Failed to query video capabilities: Inappropriate ioctl for device
[DEBUG] (NvV4l2Element.cpp:70) :Opened, fd = 14
[DEBUG] (NvV4l2Element.cpp:228) :Successfully subscribed to event 5
[DEBUG] (NvV4l2Element.cpp:190) :Set controls
[DEBUG] (NvVideoDecoder.cpp:174) :Disabling decoder complete frame input buffer: success
NvMMLiteOpen : Block : BlockType = 261
TVMR: NvMMLiteTVMRDecBlockOpen: 7650: NvMMLiteBlockOpen
NvMMLiteBlockCreate : Block : BlockType = 261
[DEBUG] (NvV4l2ElementPlane.cpp:290) :Output Plane:VIDIOC_S_FMT at capture plane successful
NvMMLiteTVMRDecSetAttribute:: Error status reporting set to 1
Here is the relevant section of my code:
in main():
pthread_create(&rtspClientHandle, &attr, rtspClientThread, NULL); // to feed decoder buffer queue
...
pthread_create(&decodeHandle, &attr, decoderThread, NULL);
in rtspClientThread()
in a loop:
BufferInfo buf = rtspClinetBufferQueue.pop(); // get empty buffer from input queue
void *wPtr = NULL;
NvBufferMemMap(buf.fd, 0, NvBufferMem_Write, &wPtr);
buf.size = rtspClientRead(); // reads one H.264 chunk into the mapped buffer via wPtr
success = rtspClinetOutBufferQueue.push(buf); // put filled buffer to output queue
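For context, BufferInfo and the two rtspClinet*Queue objects are just a simple blocking queue of DMABUF fds, roughly like the simplified sketch below (not my exact code; the field names are only illustrative):

#include <condition_variable>
#include <cstdint>
#include <mutex>
#include <queue>

// What I mean by BufferInfo: one decoder input buffer identified by its DMABUF fd
struct BufferInfo
{
    int fd;          // DMABUF fd of the NvBuffer
    uint32_t size;   // number of bytes filled by the RTSP reader
};

// Minimal thread-safe queue: pop() blocks until an item is available
template <typename T>
class BlockingQueue
{
public:
    bool push(const T &item)
    {
        std::lock_guard<std::mutex> lock(m_mutex);
        m_queue.push(item);
        m_cond.notify_one();
        return true; // always succeeds in this sketch
    }

    T pop()
    {
        std::unique_lock<std::mutex> lock(m_mutex);
        m_cond.wait(lock, [this] { return !m_queue.empty(); });
        T item = m_queue.front();
        m_queue.pop();
        return item;
    }

private:
    std::queue<T> m_queue;
    std::mutex m_mutex;
    std::condition_variable m_cond;
};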
in decoderThread()
VideoDecoder videoDecoder("dec");
videoDecoder.initialize(); // 1. subscribeEvent(V4L2_EVENT_RESOLUTION_CHANGE, 0, 0);
// 2. disableCompleteFrameInputBuffer();
// 3. setOutputPlaneFormat(V4L2_PIX_FMT_H264, chunkSize);
// 4. output_plane.setupPlane(V4L2_MEMORY_DMABUF, 10, false, false);
// 5. output_plane.setStreamStatus(true);
// 6. capture_plane.setDQThreadCallback();
// 7. capture_plane.startDQThread();
// (initialize() is written out in full at the bottom of this post)
while (videoDecoder.m_VideoDecoder->output_plane.getNumQueuedBuffers() <
       videoDecoder.m_VideoDecoder->output_plane.getNumBuffers())
{
BufferInfo buf = rtspClinetOutBufferQueue.pop(); // get filled buffer from another thread
struct v4l2_buffer v4l2_buf;
struct v4l2_plane planes[MAX_PLANES];
memset(&v4l2_buf, 0, sizeof(v4l2_buf));
memset(planes, 0, sizeof(planes));
v4l2_buf.index = videoDecoder.m_VideoDecoder->output_plane.getNumQueuedBuffers();
v4l2_buf.m.planes = planes;
v4l2_buf.m.planes[0].m.fd = buf.fd;
v4l2_buf.m.planes[0].bytesused = buf.size;
videoDecoder.m_VideoDecoder->output_plane.qBuffer(v4l2_buf, NULL); // queue the buffer to the output plane
videoDecoder.insertFdToDmaBufFdSet(buf.fd);
resetDecoderResolution(); // called once per frame; no loop inside this function
// resetDecoderResolution() roughly does the following (full sketch at the bottom of this post):
//   ret = dqEvent(ev, false);
//   if (ret == 0 && V4L2_EVENT_RESOLUTION_CHANGE == ev.type)
//   {
//       capture_plane.getFormat(format);
//       capture_plane.getCrop(crop);
//       capture_plane.deinitPlane();
//       setCapturePlaneFormat(V4L2_PIX_FMT_NV12M /*format.fmt.pix_mp.pixelformat*/, format.fmt.pix_mp.width, format.fmt.pix_mp.height);
//       getMinimumCapturePlaneBuffers(min_dec_capture_buffers);
//       capture_plane.setupPlane(V4L2_MEMORY_MMAP, min_dec_capture_buffers + 5, false, false);
//       capture_plane.setStreamStatus(true);
//       // enqueue all the empty capture plane buffers
//       return true;
//   }
//   return false;
rtspClinetBufferQueue.push(buf); // return buf to rtspClientThread() thread
}
// decode loop
while (1)
{
BufferInfo buf = rtspClinetOutBufferQueue.pop();
ret = videoDecoder.decodeFromFd(buf); // decode one frame
videoDecOutBufferQueue.push(buf); // pass the decoded YUV data to the post-processing stage
}
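For completeness, here is roughly what initialize() expands to, written out from the numbered comments above (the usual NvVideoDecoder.h / linux/videodev2.h includes are assumed; error handling stripped; decoderCapturePlaneDqCallback is only a placeholder name for my capture-plane callback):

bool VideoDecoder::initialize()
{
    // m_VideoDecoder is the wrapped NvVideoDecoder instance
    m_VideoDecoder = NvVideoDecoder::createVideoDecoder("dec");

    // 1. subscribe to resolution-change events before feeding any bitstream
    m_VideoDecoder->subscribeEvent(V4L2_EVENT_RESOLUTION_CHANGE, 0, 0);

    // 2. allow feeding arbitrary chunks instead of complete frames
    m_VideoDecoder->disableCompleteFrameInputBuffer();

    // 3. output (bitstream) plane takes H.264 chunks of up to chunkSize bytes
    m_VideoDecoder->setOutputPlaneFormat(V4L2_PIX_FMT_H264, chunkSize);

    // 4. output plane buffers are DMABUF fds imported from the RTSP thread
    m_VideoDecoder->output_plane.setupPlane(V4L2_MEMORY_DMABUF, 10, false, false);

    // 5. start streaming on the output plane
    m_VideoDecoder->output_plane.setStreamStatus(true);

    // 6. + 7. capture-plane DQ thread (callback name is a placeholder)
    m_VideoDecoder->capture_plane.setDQThreadCallback(decoderCapturePlaneDqCallback);
    m_VideoDecoder->capture_plane.startDQThread(this);

    return true;
}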
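And here is resetDecoderResolution() written out from the commented outline; it is meant to follow the same flow as query_and_set_capture() in the 00_video_decode sample (error handling stripped; MAX_PLANES is from NvBuffer.h; <cstring> assumed for memset):

bool VideoDecoder::resetDecoderResolution()
{
    struct v4l2_event ev;
    struct v4l2_format format;
    struct v4l2_crop crop;
    int min_dec_capture_buffers = 0;

    // This is the call that fails for me.
    // Note: the second argument is max_wait_ms, so 'false' means a non-blocking poll.
    int ret = m_VideoDecoder->dqEvent(ev, false);
    if (ret != 0 || ev.type != V4L2_EVENT_RESOLUTION_CHANGE)
        return false;

    // query the new format/crop reported by the decoder on the capture plane
    m_VideoDecoder->capture_plane.getFormat(format);
    m_VideoDecoder->capture_plane.getCrop(crop);

    // re-create the capture plane buffers for the new resolution
    m_VideoDecoder->capture_plane.deinitPlane();
    m_VideoDecoder->setCapturePlaneFormat(V4L2_PIX_FMT_NV12M /*format.fmt.pix_mp.pixelformat*/,
                                          format.fmt.pix_mp.width,
                                          format.fmt.pix_mp.height);
    m_VideoDecoder->getMinimumCapturePlaneBuffers(min_dec_capture_buffers);
    m_VideoDecoder->capture_plane.setupPlane(V4L2_MEMORY_MMAP,
                                             min_dec_capture_buffers + 5, false, false);
    m_VideoDecoder->capture_plane.setStreamStatus(true);

    // enqueue all the empty capture plane buffers
    for (uint32_t i = 0; i < m_VideoDecoder->capture_plane.getNumBuffers(); i++)
    {
        struct v4l2_buffer v4l2_buf;
        struct v4l2_plane planes[MAX_PLANES];
        memset(&v4l2_buf, 0, sizeof(v4l2_buf));
        memset(planes, 0, sizeof(planes));
        v4l2_buf.index = i;
        v4l2_buf.m.planes = planes;
        m_VideoDecoder->capture_plane.qBuffer(v4l2_buf, NULL);
    }
    return true;
}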