cv::Mat to GstBuffer error; BlockType = 4, Failed to write input into the OpenMAX buffer

Hi,

I'm running into the problem below; could you please help?

ERROR Info:
Framerate set to : 10 at NvxVideoEncoderSetParameterNvMMLiteOpen : Block : BlockType = 4
===== NVMEDIA: NVENC =====
NvMMLiteBlockCreate : Block : BlockType = 4
H264: Profile = 66, Level = 40
ERROR: from element /GstPipeline:preview-record-pipeline/GstOMXH264Enc-omxh264enc:omxh264enc-omxh264enc0: Could not write to resource.
Additional debug info:
/dvs/git/dirty/git-master_linux/3rdparty/gst/gst-omx/omx/gstomxvideoenc.c(2367): gst_omx_video_enc_handle_frame (): /GstPipeline:preview-record-pipeline/GstOMXH264Enc-omxh264enc:omxh264enc-omxh264enc0:
Failed to write input into the OpenMAX buffer

Code:

void rgbCallback(const sensor_msgs::Image::ConstPtr& msg) {
    ROS_ERROR("rgbCallback enter.");
    GstBuffer *buf;
    void *imgdata;
    GstMapInfo map;
    static GstClockTime timestamp = 0;
    cv::Mat frame;
    ROS_ERROR("appsrc is not null encoding: %s.", msg->encoding.c_str());
    try {
        cv_bridge::CvImageConstPtr imgPtr = cv_bridge::toCvShare(msg);
        frame = (imgPtr->image);
    } catch (cv_bridge::Exception& e) {
        ROS_ERROR("Could not convert to image!");
        return;
    }
    GstCaps *caps;
    char *gst_type, *gst_format = (char *) "";

    if (appsrc != NULL) {
        // Set caps from message
        ROS_ERROR("appsrc is not null.");
        //get_format(msg->format.c_str(), &gst_type, &gst_format);

        caps = gst_caps_new_simple("video/x-raw",
                "format", G_TYPE_STRING, "I420",
                "width", G_TYPE_INT, out_width_,
                "height", G_TYPE_INT, out_height_,
                "framerate", GST_TYPE_FRACTION, framerate_, 1,
                NULL);

        ROS_ERROR("width: %d, height: %d, framerate_: %d.", frame.size().width,
                frame.size().height, framerate_);
        gst_app_src_set_caps((GstAppSrc *) (appsrc), caps);
        gst_caps_unref(caps);
        cv::Mat yuvImage;  // YUV I420 format
        cv::Mat img640;    // after changing the resolution
        cv::imwrite(std::to_string(dump_pic_num % 200) + "origin" + ".jpg", frame);
        cv::cvtColor(frame, yuvImage, CV_RGB2YUV_I420);  // convert color space
        cv::resize(yuvImage, img640, cv::Size(out_width_, out_height_), cv::INTER_LINEAR);  // change resolution
        cv::Mat *img = new cv::Mat(img640);
        cv::imwrite(std::to_string(dump_pic_num % 200) + "convert" + ".jpg", img640);
        dump_pic_num++;
        //buf = gst_buffer_new_wrapped(img->data,
        //        img->total() * img->elemSize());
        ROS_ERROR("img width: %d, img height: %d, framerate_: %d.", out_width_,
                out_height_, framerate_);
        gsize sizeInBytes = img->total() * img->elemSize();
        buf = gst_buffer_new_wrapped_full((GstMemoryFlags)0, (gpointer)(img->data), sizeInBytes, 0, sizeInBytes, (gpointer)img, (GDestroyNotify)buffer_destroy);

        ROS_ERROR("rgbCallback 6");
        GST_BUFFER_PTS(buf) = timestamp;
        GST_BUFFER_DTS(buf) = timestamp;
        GST_BUFFER_DURATION(buf) = gst_util_uint64_scale_int(1, GST_SECOND,
                framerate_);
        timestamp += GST_BUFFER_DURATION(buf);
        ROS_ERROR("rgbCallback 7");
        gst_app_src_push_buffer((GstAppSrc *) (appsrc), buf);
        ROS_ERROR("rgbCallback 8");
    }
}
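For reference, the caps above advertise I420 at out_width_ x out_height_, which means each frame pushed into appsrc should be out_width_ * out_height_ * 3 / 2 bytes. Below is a minimal, hedged sanity-check sketch for comparing the wrapped cv::Mat against that size before pushing; checkI420Size is just an illustrative helper name (not an existing GStreamer or OpenCV API), and this is not a confirmed diagnosis of the error.

#include <gst/gst.h>
#include <opencv2/opencv.hpp>

// Illustrative helper (hypothetical): returns true when `img` holds exactly
// one I420 frame of the given dimensions. An I420 frame is one full-size luma
// plane plus two quarter-size chroma planes, i.e. width * height * 3 / 2 bytes.
static bool checkI420Size(const cv::Mat &img, int width, int height) {
    const gsize expected = (gsize) width * height * 3 / 2;
    const gsize actual = img.total() * img.elemSize();
    if (actual != expected) {
        g_printerr("I420 size mismatch: buffer is %" G_GSIZE_FORMAT
                   " bytes, caps imply %" G_GSIZE_FORMAT " bytes\n",
                   actual, expected);
        return false;
    }
    return true;
}

Called as checkI420Size(*img, out_width_, out_height_) right before gst_app_src_push_buffer(), this would show whether the pushed buffer actually matches the negotiated caps.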

Hi.
From the logs this looks like an L4T platform rather than the NVIDIA Optical Flow SDK on a dGPU.
Can you confirm which hardware platform you are using?
If so, you should post this question on the Jetson forum:
https://devtalk.nvidia.com/default/board/139/jetson-embedded-systems/

Let me know if you need further help.

Thanks.