I use FFmpeg to demux and the jetson_multimedia_api to decode.
When I feed input data to the decoder, I never receive any decoded output.
ctx->dec->dqEvent blocks and never returns!
However, I do get output when running the jetson_multimedia_api samples.
My code:
// Capture-plane thread: waits for the V4L2 resolution-change event, then
// repeatedly dequeues decoded buffers from the capture plane, converts each
// one with NvBufferTransform into ctx->dst_dma_fd, copies the planes into
// CPU-side ring buffers (bufptr_0/1/2) and publishes the slot index through
// ctx->frame_pools for the consumer thread.
// arg: a nvmpictx* created by nvmpi_create_decoder. Returns nullptr.
void *dec_capture_loop_fcn(void *arg){
    nvmpictx* ctx=(nvmpictx*)arg;
    struct v4l2_format v4l2Format;
    struct v4l2_crop v4l2Crop;
    struct v4l2_event v4l2Event;
    int ret,buf_index=0;
    while (!(ctx->dec->isInError()||ctx->eos)){
        NvBuffer *dec_buffer;
        // Zero the event struct before every dqEvent call so stale fields
        // from a previous iteration cannot be misread.
        memset(&v4l2Event, 0, sizeof(v4l2Event));
        // Block up to 500 ms for the first resolution event; after the
        // capture plane is configured, poll with a 0 timeout.
        ret = ctx->dec->dqEvent(v4l2Event, ctx->got_res_event ? 0 : 500);
        printf("dec_capture_loop_fcn 1111111 %d\n", ret);
        if (ret == 0)
        {
            switch (v4l2Event.type)
            {
                case V4L2_EVENT_RESOLUTION_CHANGE:
                    // Sets up the capture plane and ctx->got_res_event.
                    respondToResolutionEvent(v4l2Format, v4l2Crop,ctx);
                    continue;
            }
        }
        printf("dec_capture_loop_fcn 22222222222222\n");
        // Until the first resolution event arrives the capture plane is not
        // streaming yet -- keep waiting for the event.
        if (!ctx->got_res_event) {
            continue;
        }
        while(!ctx->eos){
            struct v4l2_buffer v4l2_buf;
            struct v4l2_plane planes[MAX_PLANES];
            // BUG FIX: v4l2_buf and planes were previously left
            // uninitialized. The Jetson Multimedia API samples memset both
            // before dqBuffer; stack garbage in the v4l2 structs can make
            // the VIDIOC_DQBUF ioctl fail or behave unpredictably.
            memset(&v4l2_buf, 0, sizeof(v4l2_buf));
            memset(planes, 0, sizeof(planes));
            v4l2_buf.m.planes = planes;
            printf("dec_capture_loop_fcn 3333333333333333\n");
            if (ctx->dec->capture_plane.dqBuffer(v4l2_buf, &dec_buffer, NULL, 0)){
                if (errno == EAGAIN)
                {
                    // No decoded frame ready yet -- back off briefly, then
                    // re-enter the outer loop (also re-checks events/EOS).
                    usleep(1000);
                }
                else
                {
                    ERROR_MSG("Error while calling dequeue at capture plane");
                    ctx->eos=true;
                }
                break;
            }
            // Substitute the DMA buffer fd that was exported for this index.
            dec_buffer->planes[0].fd = ctx->dmaBufferFileDescriptor[v4l2_buf.index];
            // Full-frame transform: source and destination cover the whole
            // coded picture.
            NvBufferRect src_rect, dest_rect;
            src_rect.top = 0;
            src_rect.left = 0;
            src_rect.width = ctx->coded_width;
            src_rect.height = ctx->coded_height;
            dest_rect.top = 0;
            dest_rect.left = 0;
            dest_rect.width = ctx->coded_width;
            dest_rect.height = ctx->coded_height;
            NvBufferTransformParams transform_params;
            memset(&transform_params,0,sizeof(transform_params));
            transform_params.transform_flag = NVBUFFER_TRANSFORM_FILTER;
            transform_params.transform_flip = NvBufferTransform_None;
            transform_params.transform_filter = NvBufferTransform_Filter_Smart;
            transform_params.src_rect = src_rect;
            transform_params.dst_rect = dest_rect;
            // The ring buffers and frame_pools are shared with the consumer
            // thread -- guard them with ctx->mutex.
            ctx->mutex->lock();
            if(!ctx->eos){
                ret = NvBufferTransform(dec_buffer->planes[0].fd, ctx->dst_dma_fd, &transform_params);
                TEST_ERROR(ret==-1, "Transform failed",ret);
                NvBufferParams parm;
                ret = NvBufferGetParams(ctx->dst_dma_fd, &parm);
                // Lazily allocate the CPU ring buffers on the first frame,
                // once the real plane sizes are known.
                if(!ctx->frame_size[0]){
                    for(int index=0;index<MAX_BUFFERS;index++){
                        ctx->bufptr_0[index]=new unsigned char[parm.psize[0]];//Y
                        ctx->bufptr_1[index]=new unsigned char[parm.psize[1]];//UV or UU
                        ctx->bufptr_2[index]=new unsigned char[parm.psize[2]];//VV
                    }
                }
                ctx->frame_linesize[0]=parm.width[0];
                ctx->frame_size[0]=parm.psize[0];
                ctx->frame_linesize[1]=parm.width[1];
                ctx->frame_size[1]=parm.psize[1];
                ctx->frame_linesize[2]=parm.width[2];
                ctx->frame_size[2]=parm.psize[2];
                // Copy each plane of the transformed frame to system memory.
                ret=NvBuffer2Raw(ctx->dst_dma_fd,0,parm.width[0],parm.height[0],ctx->bufptr_0[buf_index]);
                ret=NvBuffer2Raw(ctx->dst_dma_fd,1,parm.width[1],parm.height[1],ctx->bufptr_1[buf_index]);
                if(ctx->out_pixfmt==NV_PIX_YUV420)
                    ret=NvBuffer2Raw(ctx->dst_dma_fd,2,parm.width[2],parm.height[2],ctx->bufptr_2[buf_index]);
                ctx->frame_pools->push(buf_index);
                ctx->timestamp[buf_index]= (v4l2_buf.timestamp.tv_usec % 1000000) + (v4l2_buf.timestamp.tv_sec * 1000000UL);
                buf_index=(buf_index+1)%MAX_BUFFERS;
            }
            ctx->mutex->unlock();
            if (ctx->eos) {
                break;
            }
            ctx->has_frame_cv->notify_one();
            // Re-queue the capture buffer (with its DMA fd) back to the
            // decoder so it can be filled again.
            v4l2_buf.m.planes[0].m.fd = ctx->dmaBufferFileDescriptor[v4l2_buf.index];
            if (ctx->dec->capture_plane.qBuffer(v4l2_buf, NULL) < 0){
                ERROR_MSG("Error while queueing buffer at decoder capture plane");
            }
        }
    }
    // Wake all waiting threads at EOS or decoder error
    ctx->has_frame_cv->notify_all();
    return nullptr;
}
// Creates and initializes a decoder context: builds the NvVideoDecoder,
// subscribes to resolution-change events, configures the output plane for
// the requested codec, and starts the capture-plane thread.
// codingType: source codec (H264/HEVC/MPEG2/MPEG4/VP8/VP9; defaults to H264).
// pixFormat:  desired CPU-side output pixel format.
// Returns a heap-allocated nvmpictx owned by the caller.
nvmpictx* nvmpi_create_decoder(nvCodingType codingType,nvPixFormat pixFormat){
    // BUG FIX: ret was uninitialized but passed to the first TEST_ERROR,
    // which reads it when reporting an error.
    int ret = 0;
    log_level = LOG_LEVEL_INFO;
    nvmpictx* ctx=new nvmpictx;
    ctx->dec = NvVideoDecoder::createVideoDecoder("dec0");
    TEST_ERROR(!ctx->dec, "Could not create decoder",ret);
    // The capture thread relies on this event to learn the stream geometry.
    ret=ctx->dec->subscribeEvent(V4L2_EVENT_RESOLUTION_CHANGE, 0, 0);
    TEST_ERROR(ret < 0, "Could not subscribe to V4L2_EVENT_RESOLUTION_CHANGE", ret);
    // Map the public codec enum onto the V4L2 pixel format the decoder needs.
    switch(codingType){
        case NV_VIDEO_CodingH264:
            ctx->decoder_pixfmt=V4L2_PIX_FMT_H264;
            break;
        case NV_VIDEO_CodingHEVC:
            ctx->decoder_pixfmt=V4L2_PIX_FMT_H265;
            break;
        case NV_VIDEO_CodingMPEG4:
            ctx->decoder_pixfmt=V4L2_PIX_FMT_MPEG4;
            break;
        case NV_VIDEO_CodingMPEG2:
            ctx->decoder_pixfmt=V4L2_PIX_FMT_MPEG2;
            break;
        case NV_VIDEO_CodingVP8:
            ctx->decoder_pixfmt=V4L2_PIX_FMT_VP8;
            break;
        case NV_VIDEO_CodingVP9:
            ctx->decoder_pixfmt=V4L2_PIX_FMT_VP9;
            break;
        default:
            ctx->decoder_pixfmt=V4L2_PIX_FMT_H264;
            break;
    }
    ret=ctx->dec->setOutputPlaneFormat(ctx->decoder_pixfmt, CHUNK_SIZE);
    TEST_ERROR(ret < 0, "Could not set output plane format", ret);
    //ctx->nalu_parse_buffer = new char[CHUNK_SIZE];
    // 0 = chunk (non-NALU) input mode: packets need not be single NAL units.
    ret = ctx->dec->setFrameInputMode(0);
    TEST_ERROR(ret < 0, "Error in decoder setFrameInputMode for NALU", ret);
    ret = ctx->dec->output_plane.setupPlane(V4L2_MEMORY_USERPTR, 10, false, true);
    TEST_ERROR(ret < 0, "Error while setting up output plane", ret);
    // BUG FIX: the return value of setStreamStatus was discarded while the
    // following TEST_ERROR checked a stale ret from the previous call.
    ret = ctx->dec->output_plane.setStreamStatus(true);
    TEST_ERROR(ret < 0, "Error in output plane stream on", ret);
    ctx->out_pixfmt=pixFormat;
    ctx->dst_dma_fd=-1;
    ctx->eos=false;
    ctx->got_res_event=false;
    ctx->index=0;
    // frame_size[0]==0 signals the capture thread to allocate ring buffers
    // once the first decoded frame reveals the real plane sizes.
    ctx->frame_size[0]=0;
    ctx->frame_pools=new std::queue<int>;
    ctx->mutex = new std::mutex();
    ctx->has_frame_cv = new std::condition_variable();
    for(int index=0;index<MAX_BUFFERS;index++){
        ctx->dmaBufferFileDescriptor[index]=0;
        ctx->bufptr_0[index] = nullptr;
        ctx->bufptr_1[index] = nullptr;
        ctx->bufptr_2[index] = nullptr;
    }
    ctx->numberCaptureBuffers=0;
    // The capture loop runs until EOS or decoder error.
    ctx->dec_capture_loop=new thread(dec_capture_loop_fcn,ctx);
    return ctx;
}
// Feeds one demuxed packet to the decoder's output (input) plane.
// For the first getNumBuffers() packets the pre-allocated plane buffers are
// used directly; afterwards a previously queued buffer is dequeued
// (blocking) and reused. A packet with payload_size == 0 signals EOS.
// Returns 0 on success, -1 on failure.
int nvmpi_decoder_put_packet(nvmpictx* ctx,nvPacket* packet){
    int ret;
    struct v4l2_buffer v4l2_buf;
    struct v4l2_plane planes[MAX_PLANES];
    NvBuffer *nvBuffer;
    memset(&v4l2_buf, 0, sizeof(v4l2_buf));
    memset(planes, 0, sizeof(planes));
    v4l2_buf.m.planes = planes;
    if (ctx->index < (int)ctx->dec->output_plane.getNumBuffers()) {
        // Still priming: take the next never-used plane buffer.
        nvBuffer = ctx->dec->output_plane.getNthBuffer(ctx->index);
    } else {
        // Steady state: block until the decoder releases a buffer.
        ret = ctx->dec->output_plane.dqBuffer(v4l2_buf, &nvBuffer, NULL, -1);
        if (ret < 0) {
            cout << "Error DQing buffer at output plane" << std::endl;
            // BUG FIX: previously returned false (== 0), which callers could
            // not distinguish from the success return value 0.
            return -1;
        }
    }
    // NOTE(review): payload_size is copied unchecked into the plane buffer;
    // presumably callers guarantee payload_size <= CHUNK_SIZE -- confirm.
    memcpy(nvBuffer->planes[0].data,packet->payload,packet->payload_size);
    nvBuffer->planes[0].bytesused=packet->payload_size;
    if (ctx->index < ctx->dec->output_plane.getNumBuffers())
    {
        // During priming the queue index is ours to assign.
        v4l2_buf.index = ctx->index ;
        v4l2_buf.m.planes = planes;
    }
    v4l2_buf.m.planes[0].bytesused = nvBuffer->planes[0].bytesused;
    // Propagate the packet pts through the decoder as a V4L2 timestamp.
    v4l2_buf.flags |= V4L2_BUF_FLAG_TIMESTAMP_COPY;
    v4l2_buf.timestamp.tv_sec = packet->pts / 1000000;
    v4l2_buf.timestamp.tv_usec = packet->pts % 1000000;
    ret = ctx->dec->output_plane.qBuffer(v4l2_buf, NULL);
    if (ret < 0) {
        std::cout << "Error Qing buffer at output plane" << std::endl;
        // BUG FIX: same false/0 ambiguity as above.
        return -1;
    }
    if (ctx->index < ctx->dec->output_plane.getNumBuffers())
        ctx->index++;
    // An empty payload queued to the decoder marks end of stream.
    if (v4l2_buf.m.planes[0].bytesused == 0) {
        ctx->eos=true;
        std::cout << "Input file read complete" << std::endl;
    }
    return 0;
}