Hi!
For a project, I was hoping to get accurate (kernel) timestamps of images taken by the camera. Taking inspiration from the argus samples in /usr/src/jetson_multimedia_api/argus/samples, a teammate of mine wrote some LibArgus code with a Cuda consumer in order to quickly convert the images to a ROS acceptable format (BGR8 in this case). This worked wonders for getting the image in the correct format, but we could not work out a way to get the kernel timestamp from the LibArgus library with the Cuda consumer. None of the documentation I could find indicates that any Cuda datatype can be converted to the ICaptureMetadata datatype so that getSensorTimestamp() could be called.
Thus, I have two questions, and I can resolve this issue if either one has an answer:
1). Can any Cuda datatype in a Cuda consumer be converted to the ICaptureMetadata datatype so that getSensorTimestamp() can be called on it?
2). Using an IFrameConsumer, what format is the output image in, and can that image later be converted to a CUDA datatype, so that I can continue to use the CUDA-based conversion to BGR8 that has served us well in the past?
Any advice is appreciated. Below is the relevant Cuda consumer code. It should look similar to those argus samples, so hopefully it is not too difficult to read.
/**
 * RAII helper around one frame of a CUDA EGL stream consumer.
 *
 * The constructor acquires (and maps) a frame from the given stream
 * connection; the destructor releases it back to the stream. While held,
 * publish() converts the frame to BGR8 and publishes it on a ROS topic.
 */
class CudaFrameAcquire {
public:
    explicit CudaFrameAcquire(CUeglStreamConnection& connection);
    ~CudaFrameAcquire();

    // Non-copyable: a copy would release the same EGL frame twice.
    CudaFrameAcquire(const CudaFrameAcquire&) = delete;
    CudaFrameAcquire& operator=(const CudaFrameAcquire&) = delete;

    // Converts the acquired frame to BGR8 and publishes it on the left or
    // right camera image topic. Returns false (via ORIGINATE_ERROR) on failure.
    bool publish(bool leftFrame);

private:
    CUeglStreamConnection& m_connection;  // stream the frame was acquired from
    CUgraphicsResource m_resource;        // non-zero only while a frame is held
    CUeglFrame m_frame;                   // mapped frame description (planes, size)
    CUstream m_stream;                    // stream handle used by acquire/release
};
/**
 * Acquires the next frame from the EGL stream and maps it into m_frame.
 *
 * On any failure m_resource is left at 0 and m_frame zeroed, so both the
 * destructor and publish() can detect that no frame is held.
 */
CudaFrameAcquire::CudaFrameAcquire(CUeglStreamConnection& connection)
    : m_connection(connection)
    // Init-list now matches declaration order (m_resource before m_stream),
    // avoiding the -Wreorder warning of the previous ordering.
    , m_resource(0)
    , m_stream(NULL) {
    // Zero the frame descriptor so publish() never reads garbage plane
    // handles if acquisition or mapping fails below.
    memset(&m_frame, 0, sizeof(m_frame));
    CUresult result = cuEGLStreamConsumerAcquireFrame(&m_connection, &m_resource, &m_stream, -1);
    if (result == CUDA_SUCCESS) {
        result = cuGraphicsResourceGetMappedEglFrame(&m_frame, m_resource, 0, 0);
        if (result != CUDA_SUCCESS) {
            // Mapping failed: hand the frame straight back and record that
            // we hold nothing (previously this error was silently ignored).
            cuEGLStreamConsumerReleaseFrame(&m_connection, m_resource, &m_stream);
            m_resource = 0;
        }
    }
}
// Returns the held frame to the EGL stream; a no-op when acquisition failed
// (m_resource == 0).
CudaFrameAcquire::~CudaFrameAcquire() {
    if (!m_resource) {
        return;
    }
    cuEGLStreamConsumerReleaseFrame(&m_connection, m_resource, &m_stream);
}
bool CudaFrameAcquire::publish(bool leftFrame) {
CUDA_RESOURCE_DESC cudaResourceDesc;
memset(&cudaResourceDesc, 0, sizeof(cudaResourceDesc));
cudaResourceDesc.resType = CU_RESOURCE_TYPE_ARRAY;
cudaResourceDesc.res.array.hArray = m_frame.frame.pArray[0];
CUsurfObject cudaSurfObj1 = 0;
CUresult cuResult = cuSurfObjectCreate(&cudaSurfObj1, &cudaResourceDesc);
if (cuResult != CUDA_SUCCESS) {
ORIGINATE_ERROR("Unable to create surface object 1 (%s)", getCudaErrorString(cuResult));
}
cudaResourceDesc.res.array.hArray = m_frame.frame.pArray[1];
CUsurfObject cudaSurfObj2 = 0;
cuResult = cuSurfObjectCreate(&cudaSurfObj2, &cudaResourceDesc);
if (cuResult != CUDA_SUCCESS) {
ORIGINATE_ERROR("Unable to create surface object 2 (%s)", getCudaErrorString(cuResult));
}
//This calls our conversion code to BGR8
float delta = convert(cudaSurfObj1, cudaSurfObj2, m_frame.width, m_frame.height, oBuffer);
cuSurfObjectDestroy(cudaSurfObj1);
cuSurfObjectDestroy(cudaSurfObj2);
sensor_msgs::Image output;
//This is the inaccurate timestamp we hope to replace with the getSensorTimestamp() timestamp
output.header.stamp = ros::Time::now();
sensor_msgs::fillImage(output, sensor_msgs::image_encodings::BGR8, m_frame.height, m_frame.width, 3 * m_frame.width, (void*) oBuffer);
//We are using two cameras, so we publish to the respective ROS topic
if (leftFrame) {
left_image_pub.publish(output);
} else {
right_image_pub.publish(output);
}
return true;
}