Hi NVIDIA,
I want to capture and encode. I tried to combine the 12th sample (camera capture) with the 1st sample (video encode), but I am getting a crash or a hang.
The problems are:
- How to fill the captured data into the encoder buffer.
- read_video_frame needs to be replaced.
- read_video_frame works on an NvBuffer, but the capture code has its data in a v4l2_buf buffer (a sketch of what I think is needed is below).
Do you have any example that captures and encodes on TX2 with JetPack 3.2?
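What I think I need in place of read_video_frame is a plane-by-plane copy from the mapped camera frame into the encoder's NvBuffer, roughly like the sketch below. fill_encoder_buffer, src_plane and src_stride are placeholder names of mine; I am assuming the captured data is already CPU-mapped and in the same pixel format as the encoder output plane (YUV420M in my case).

#include <cstring>      // memcpy
#include "NvBuffer.h"

// Sketch only -- mirrors what read_video_frame does, but copies from memory
// instead of reading from a file. src_plane/src_stride are the per-plane
// source pointers and pitches of the captured frame (placeholders).
static void
fill_encoder_buffer(NvBuffer &buffer,
                    unsigned char * const src_plane[MAX_PLANES],
                    const unsigned int src_stride[MAX_PLANES])
{
    for (unsigned int i = 0; i < buffer.n_planes; i++)
    {
        NvBuffer::NvBufferPlane &plane = buffer.planes[i];
        unsigned int bytes_per_row = plane.fmt.bytesperpixel * plane.fmt.width;
        unsigned char *dst = (unsigned char *) plane.data;
        const unsigned char *src = src_plane[i];

        for (unsigned int row = 0; row < plane.fmt.height; row++)
        {
            memcpy(dst, src, bytes_per_row);
            dst += plane.fmt.stride;   // destination pitch from the NvBuffer
            src += src_stride[i];      // source pitch from the capture buffer
        }
        // Non-zero bytesused matters: a 0-byte plane is treated as EOS
        plane.bytesused = plane.fmt.stride * plane.fmt.height;
    }
}

The copy uses the destination stride from the NvBuffer, since the encoder buffer pitch is not necessarily the same as the camera pitch.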
Below is the core code:
ctx->conv->capture_plane.setDQThreadCallback(conv_capture_dqbuf_thread_callback);
ctx->conv->output_plane.setDQThreadCallback(conv_output_dqbuf_thread_callback);
//Encoder//
ret = pVideoAudioEncode->VCE_InitEncConfig(pstEncCtx);
if(ret == FAILURE)
{
goto cleanup;
}
pstEncCtx->enc->capture_plane.setDQThreadCallback(encoder_capture_plane_dq_callback);
//Capture//
// Start VIC processing thread
ctx->conv->capture_plane.startDQThread(ctx);
ctx->conv->output_plane.startDQThread(ctx);
// startDQThread starts a thread internally which calls the
// encoder_capture_plane_dq_callback whenever a buffer is dequeued
// on the plane
pstEncCtx->enc->capture_plane.startDQThread(pstEncCtx);
ret = pVideoAudioEncode->VCE_EnqueueEmptyCapturePlane(pstEncCtx);
if(ret == FAILURE)
{
goto cleanup;
}
// Enable render profiling information
//ctx->renderer->enableProfiling();
fds[0].fd = ctx->cam_fd;
fds[0].events = POLLIN;
while (poll(fds, 1, 5000) > 0 && !ctx->got_error &&
!ctx->conv->isInError() && !quit)
{
if (fds[0].revents & POLLIN)
{
struct v4l2_buffer v4l2_buf;
// Dequeue camera buff
memset(&v4l2_buf, 0, sizeof(v4l2_buf));
v4l2_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
v4l2_buf.memory = V4L2_MEMORY_DMABUF;
if (ioctl(ctx->cam_fd, VIDIOC_DQBUF, &v4l2_buf) < 0)
ERROR_RETURN("Failed to dequeue camera buff: %s (%d)",
strerror(errno), errno);
ctx->frame++;
printf("\n frame No = %d \n",ctx->frame);
if (ctx->frame == ctx->save_n_frame)
save_frame_to_file(ctx, &v4l2_buf);
// Push nv_buffer into conv output queue for conversion
pthread_mutex_lock(&ctx->queue_lock);
ctx->conv_output_plane_buf_queue->push(&ctx->g_buff[v4l2_buf.index]);
pthread_cond_broadcast(&ctx->queue_cond);
pthread_mutex_unlock(&ctx->queue_lock);
//Encoder//
pVideoAudioEncode->VCE_ReadAndQueueOuputPlanesBuff(&eos, &input_frames_queued_count,&v4l2_buf);
// Enqueue camera buff
// It might be more reasonable to wait for the completion of
// VIC processing before enqueuing the current buff. But VIC processing
// time is far less than the camera frame interval, so we probably
// don't need such synchronization.
if (ioctl(ctx->cam_fd, VIDIOC_QBUF, &v4l2_buf))
ERROR_RETURN("Failed to queue camera buffers: %s (%d)",
strerror(errno), errno);
}
}
if (quit && !ctx->conv->isInError())
{
// Signal EOS to the dq thread of VIC output plane
ctx->g_buff[0].dmabuff_fd = 0;
pthread_mutex_lock(&ctx->queue_lock);
ctx->conv_output_plane_buf_queue->push(&ctx->g_buff[0]);
pthread_cond_broadcast(&ctx->queue_cond);
pthread_mutex_unlock(&ctx->queue_lock);
}
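// Sketch (my assumption, based on how the 1st sample signals EOS): I think the
// encoder also needs a flush here, i.e. one output-plane buffer queued with
// bytesused = 0, otherwise its capture-plane DQ thread never sees EOS.
// Is this the right way to do it?
if (quit && !pstEncCtx->enc->isInError())
{
    struct v4l2_buffer enc_eos_buf;
    struct v4l2_plane enc_eos_planes[MAX_PLANES];
    NvBuffer *enc_buffer;

    memset(&enc_eos_buf, 0, sizeof(enc_eos_buf));
    memset(enc_eos_planes, 0, sizeof(enc_eos_planes));
    enc_eos_buf.m.planes = enc_eos_planes;

    if (pstEncCtx->enc->output_plane.dqBuffer(enc_eos_buf, &enc_buffer, NULL, 10) == 0)
    {
        enc_eos_buf.m.planes[0].bytesused = 0;  // zero bytesused marks EOS
        pstEncCtx->enc->output_plane.qBuffer(enc_eos_buf, NULL);
    }

    // Wait until the capture-plane DQ thread has drained the encoded frames
    pstEncCtx->enc->capture_plane.waitForDQThread(2000);
}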
// Stop VIC dq thread
if (!ctx->got_error)
{
ctx->conv->output_plane.deinitPlane();
ctx->conv->capture_plane.deinitPlane();
}
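For completeness, my encoder capture-plane callback follows the pattern from the 1st (video_encode) sample; nothing special happens there. pstEncCtx->out_file is just my output bitstream stream (assumed member name):

static bool
encoder_capture_plane_dq_callback(struct v4l2_buffer *v4l2_buf, NvBuffer *buffer,
                                  NvBuffer *shared_buffer, void *arg)
{
    Enccontext_t *pstEncCtx = (Enccontext_t *) arg;

    if (v4l2_buf == NULL)
    {
        cerr << "Error while dequeuing buffer from encoder capture plane" << endl;
        return false;
    }

    // Write the encoded bitstream chunk to the output file
    pstEncCtx->out_file->write((char *) buffer->planes[0].data,
                               buffer->planes[0].bytesused);

    // Return the buffer to the encoder capture plane
    if (pstEncCtx->enc->capture_plane.qBuffer(*v4l2_buf, NULL) < 0)
    {
        cerr << "Error while queueing buffer at encoder capture plane" << endl;
        return false;
    }

    // A 0-byte buffer means the encoder has flushed (EOS) -- stop the DQ thread
    if (buffer->planes[0].bytesused == 0)
        return false;

    return true;
}

And here is VCE_ReadAndQueueOuputPlanesBuff, where I try to hand the captured frame to the encoder: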
int VCE_ReadAndQueueOuputPlanesBuff(bool *bEos, unsigned int *pnInputFramesQueuedCount, struct v4l2_buffer *pv4l2_buffer)
{
int ret = SUCCESS;
Enccontext_t *pstEncCtx = m_pstEncCtx;
int i;
// Read video frame and queue all the output plane buffers +
// Keep reading input till EOS is reached
while (!pstEncCtx->got_error && !pstEncCtx->enc->isInError() && !*bEos)
{
struct v4l2_buffer v4l2_buf;
struct v4l2_plane planes[MAX_PLANES];
NvBuffer *buffer;
memset(&v4l2_buf, 0, sizeof(v4l2_buf));
memset(planes, 0, sizeof(planes));
v4l2_buf.m.planes = planes;
if(*pnInputFramesQueuedCount < pstEncCtx->enc->output_plane.getNumBuffers())
{
v4l2_buf.index = *pnInputFramesQueuedCount;
buffer = pstEncCtx->enc->output_plane.getNthBuffer(*pnInputFramesQueuedCount);
}
else
{
// need to fill data
if (pstEncCtx->enc->output_plane.dqBuffer(v4l2_buf, &buffer, NULL, 10) < 0)
{
cerr << "ERROR while DQing buffer at output plane" << endl;
VCE_Abort(pstEncCtx);
ret = FAILURE;
goto cleanup;
}
}
if (pstEncCtx->runtime_params_str &&
(pstEncCtx->enc->output_plane.getTotalQueuedBuffers() ==
pstEncCtx->next_param_change_frame))
{
VCE_SetRuntimeParams(pstEncCtx);
if (pstEncCtx->runtime_params_str)
VCE_GetNextRuntimeParamChangeFrame(pstEncCtx);
}
for(i = 0; i < buffer->n_planes ; i++)
{
buffer->planes[i].data = (unsigned char *)pv4l2_buffer->m.planes[i].m.userptr;
//buffer->planes[i].bytesused = pv4l2_buffer->m.planes[i].bytesused;
//buffer->planes[i].fd = pv4l2_buffer->m.planes[i].m.fd;
}
printf("\n fill the buffer \n");
//if (read_video_frame(pstEncCtx->in_file, *buffer) < 0)
//{
// cerr << "Could not read complete frame from input file" << endl;
// v4l2_buf.m.planes[0].bytesused = 0;
//}
ret = pstEncCtx->enc->output_plane.qBuffer(v4l2_buf, NULL);
if (ret < 0)
{
cerr << "Error while queueing buffer at output plane" << endl;
VCE_Abort(pstEncCtx);
goto cleanup;
}
if (v4l2_buf.m.planes[0].bytesused == 0)
{
cerr << "File read complete." << endl;
*bEos = true;
break;
}
(*pnInputFramesQueuedCount)++;
}
ret = SUCCESS;
cleanup:
if(ret != SUCCESS)
{
ret = FAILURE;
}
return ret;
}
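Back to the actual problem: I suspect the m.userptr assignment in the plane loop above is wrong, because the camera buffers are V4L2_MEMORY_DMABUF, so m.userptr is never filled in. What I think has to go there instead is a real copy from the CPU-mapped capture frame into the encoder buffer, something like the snippet below. cam_plane and cam_stride are placeholders for the mapping and pitch of ctx->g_buff[pv4l2_buffer->index]; if the camera format does not match the encoder input format, I assume the frame would have to come from the converter's capture plane instead.

// Sketch: replace the planes[i].data = m.userptr assignment with a copy from
// the mapped camera frame (cam_plane/cam_stride are placeholders):
fill_encoder_buffer(*buffer, cam_plane, cam_stride);
// fill_encoder_buffer sets planes[i].bytesused, so the bytesused == 0 check
// after qBuffer no longer fires on the first frame.

Is that correct, or is there a simpler way (for example queueing the DMABUF fd directly, as in the commented-out planes[i].fd line) on JetPack 3.2?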