Hello Everyone!
I am currently using a USB camera and encoding the received frames using the NVIDIA video encoder. I used the examples for the tegra_multimedia_api (01_video_encode and 03_video_cuda_enc (just following the encode part)).
Occasionally (after flashing the TX2 with JetPack), I would get horizontal lines in my video (notice the bar on the right side of the provided screenshot). The horizontal lines would also show up frequently as individual longer lines throughout the main area of the video, depending on how much movement occurred in the video (the more movement, the more horizontal lines appear all over).
I was using JetPack v 3.1 and saw this occur maybe only once (was able to fix it by reflashing the TX2 with JetPack 3.1 again). I recently upgraded to JetPack 3.3 and the error appears to happen much more frequently (50% or more). It’ll happen to all videos encoded, and can be fixed by reflashing the TX2 with JetPack. The frequency of this occurring has me concerned that it is something I am doing wrong with my code (although that it can be fixed by reflashing is also odd).
I was hoping that people who know far more about video encoding than I do could tell me what this type of artifact is likely a result of, and whether there is some area of my code I should focus on so I can fix it without reflashing the TX2.
The incoming frames are in OpenCV format (cv::Mat).
I follow essentially the encoder creation shown in the 03_video_cuda_enc example (substituted in actual numbers I am using):
// In my setup_encoder function
// Creates and configures the hardware H.264 encoder, starts streaming on
// both planes, and primes the capture (encoded-output) plane with empty
// buffers. NOTE: V4L2 requires the capture-plane format to be set before
// the output-plane format on NVIDIA encoders, which this ordering respects.
ctx.enc = NvVideoEncoder::createVideoEncoder("enc0");
// Capture plane = encoded bitstream: H.264, 1440x1080, 4 MiB per buffer.
ret = ctx.enc->setCapturePlaneFormat(V4L2_PIX_FMT_H264, 1440, 1080, 4 * 1024 * 1024);
// Output plane = raw input frames: planar YUV 4:2:0 (separate Y/U/V planes).
ret = ctx.enc->setOutputPlaneFormat(V4L2_PIX_FMT_YUV420M, 1440, 1080);
// 4 Mbit/s target bitrate.
ret = ctx.enc->setBitrate(4194304);
// NOTE(review): encoder_pixfmt is branched on here, but the capture plane
// above is hard-coded to V4L2_PIX_FMT_H264 — the H.265 branch can never be
// consistent with the configured format. Verify which one is intended.
if (ctx.encoder_pixfmt == V4L2_PIX_FMT_H264)
{
ret = ctx.enc->setProfile(V4L2_MPEG_VIDEO_H264_PROFILE_HIGH);
}
else
{
ret = ctx.enc->setProfile(V4L2_MPEG_VIDEO_H265_PROFILE_MAIN);
}
if (ctx.encoder_pixfmt == V4L2_PIX_FMT_H264)
{
ret = ctx.enc->setLevel(V4L2_MPEG_VIDEO_H264_LEVEL_5_1);
// Check ret
}
// Constant bitrate mode; IDR every 25 frames, I-frame every 5 frames.
// NOTE(review): IDR interval larger than the I-frame interval is unusual —
// confirm these two values are not swapped.
ret = ctx.enc->setRateControlMode(V4L2_MPEG_VIDEO_BITRATE_MODE_CBR);
ret = ctx.enc->setIDRInterval(25);
ret = ctx.enc->setIFrameInterval(5);
// 5/1 = 5 fps nominal frame rate.
ret = ctx.enc->setFrameRate(5, 1);
ret = ctx.enc->output_plane.setupPlane(V4L2_MEMORY_MMAP, 10, true, false); // Sets 10 buffers, but max is 6, so really only gets 6
ret = ctx.enc->capture_plane.setupPlane(V4L2_MEMORY_MMAP, 10, true, false); // Sets 10 buffers, but max is 6, so really only gets 6
// STREAMON on both planes, then start the thread that dequeues encoded
// buffers from the capture plane (same pattern as the 01_video_encode sample).
ret = ctx.enc->output_plane.setStreamStatus(true);
ret = ctx.enc->capture_plane.setStreamStatus(true);
ctx.enc->capture_plane.startDQThread(&ctx);
// Queue every capture-plane buffer once so the encoder has somewhere to
// write the encoded bitstream; the DQ thread re-queues them afterwards.
// NOTE(review): nearly every ret above is unchecked — a failed setup call
// here would go unnoticed and could explain hard-to-reproduce corruption.
for (uint32_t i = 0; i < ctx.enc->capture_plane.getNumBuffers(); i++)
{
struct v4l2_buffer v4l2_buf;
struct v4l2_plane planes[MAX_PLANES];
memset(&v4l2_buf, 0, sizeof(v4l2_buf));
memset(planes, 0, MAX_PLANES * sizeof(struct v4l2_plane));
v4l2_buf.index = i;
v4l2_buf.m.planes = planes;
ret = ctx.enc->capture_plane.qBuffer(v4l2_buf, NULL);
// Check ret
}
// Using greyscale data so U and V are set to 128
// One-time initialization: the source is greyscale, so every output-plane
// buffer's U and V planes (planes 1 and 2 of V4L2_PIX_FMT_YUV420M) are
// filled with 128 — the neutral chroma value — and never touched again.
// This relies on the buffers being MMAP-backed and persistent, so the fill
// survives across queue/dequeue cycles.
for (uint32_t i = 0; i < ctx.enc->output_plane.getNumBuffers(); i++)
{
NvBuffer* buffer = ctx.enc->output_plane.getNthBuffer(i);
NvBuffer::NvBufferPlane& uPlane = buffer->planes[1];
NvBuffer::NvBufferPlane& vPlane = buffer->planes[2];
// Fill the full stride (stride may exceed width/2 due to HW alignment).
memset(uPlane.data, 128, (uPlane.fmt.stride * uPlane.fmt.height));
memset(vPlane.data, 128, (vPlane.fmt.stride * vPlane.fmt.height));
uPlane.bytesused = uPlane.fmt.stride * uPlane.fmt.height;
vPlane.bytesused = vPlane.fmt.stride * vPlane.fmt.height;
}
// Then process is called for every frame the camera captures, the frame is in openCV Mat
// ctx is a global variable FYI
// Submits one greyscale frame to the hardware encoder.
//
// Copies the frame's pixel data into the Y plane of the next available
// encoder output-plane buffer (the U/V planes were pre-filled with 128
// at setup time) and queues it for encoding. Passing a Mat with null
// data signals end-of-stream (bytesused == 0).
//
// @param frame  8-bit single-channel (CV_8UC1) greyscale image whose
//               dimensions match the configured encoder input size
//               (1440x1080) — assumed, not verified here.
// @return       The same frame, unmodified.
//
// BUG FIX vs. original: the original advanced `frame.data` itself while
// copying, permanently corrupting the cv::Mat's internal data pointer
// (the Mat was then returned to the caller pointing past its buffer).
// It also assumed the Mat's rows were packed (`frame.cols` as the row
// stride); non-continuous Mats (e.g. ROIs) would have been copied with
// a skew. Both are fixed by using cv::Mat::ptr() per row.
cv::Mat& process(cv::Mat& frame)
{
    int ret;
    struct v4l2_buffer v4l2Buf;
    struct v4l2_plane planes[MAX_PLANES];
    NvBuffer* buffer;
    memset(&v4l2Buf, 0, sizeof(v4l2Buf));
    memset(planes, 0, sizeof(planes));
    v4l2Buf.m.planes = planes;
    // Phase 1: hand out each of the encoder's buffers once, in order.
    // Phase 2: once all are in flight, block until the encoder returns
    // one we can reuse.
    if (ctx.enc->output_plane.getNumQueuedBuffers() < ctx.enc->output_plane.getNumBuffers()
        && next_initial_buffer_ < ctx.enc->output_plane.getNumBuffers())
    {
        buffer = ctx.enc->output_plane.getNthBuffer(next_initial_buffer_);
        v4l2Buf.index = next_initial_buffer_++;
    }
    else
    {
        ret = ctx.enc->output_plane.dqBuffer(v4l2Buf, &buffer, NULL, NUM_QUEUE_RETRIES);
        if (ret < 0)
        {
            // No buffer available: drop this frame rather than write
            // through an uninitialized `buffer` pointer (the original
            // ignored this error and fell through).
            return frame;
        }
    }
    if (frame.data == nullptr)
    {
        // bytesused == 0 on the output plane tells the encoder EOS.
        v4l2Buf.m.planes[0].bytesused = 0;
    }
    else
    {
        NvBuffer::NvBufferPlane& yPlane = buffer->planes[0];
        unsigned char* dst = yPlane.data;
        // Row-by-row copy: the hardware buffer's stride is typically
        // wider than the image width due to alignment requirements.
        // ptr() honours frame.step, so non-continuous Mats copy correctly,
        // and `frame` itself is never mutated.
        for (int row = 0; row < frame.rows; row++)
        {
            memcpy(dst, frame.ptr<unsigned char>(row), frame.cols);
            dst += yPlane.fmt.stride;
        }
        yPlane.bytesused = yPlane.fmt.stride * yPlane.fmt.height;
    }
    ret = ctx.enc->output_plane.qBuffer(v4l2Buf, NULL);
    // Check ret
    return frame;
}
Thanks in advance for any help.
- ydyla7g7