Publishing RCB images using nvsensor_producer

Please provide the following info (check/uncheck the boxes after creating this topic):
Software Version
DRIVE OS Linux 5.2.6
DRIVE OS Linux 5.2.6 and DriveWorks 4.0
DRIVE OS Linux 5.2.0
DRIVE OS Linux 5.2.0 and DriveWorks 3.5
NVIDIA DRIVE™ Software 10.0 (Linux)
NVIDIA DRIVE™ Software 9.0 (Linux)
other DRIVE OS version
other

Target Operating System
Linux
QNX
other

Hardware Platform
NVIDIA DRIVE™ AGX Xavier DevKit (E3550)
NVIDIA DRIVE™ AGX Pegasus DevKit (E3550)
other

SDK Manager Version
1.9.1.10844
other

Host Machine Version
native Ubuntu 18.04
other

Hi,
We are trying to publish RCB images for the lightnet ROS node, since it works only with the RCB format.
Our camera is the ar0231-rccb-bae-sf3324, which provides RCCB frames as raw images.
We currently use the existing nvsensor node, which publishes RGBA images from our RCCB sensor, but RGBA is not supported by lightnet.
We have modified this node to publish raw images in RCB format, based on the simple_camera_gmsl_raw module in driveworks-2.2.
Is our implementation correct?
Do we need to use CUDA to get the raw images?
Kindly guide us in case we are not heading in the right direction.
The code follows.

#include "camera.h"
#include "sensor_msgs/Image.h"
#include "sensor_msgs/image_encodings.h"
#include "sensor_msgs/fill_image.h"

using namespace sensor_msgs;
namespace nv {
  void SensorCamera::initialize(dwContextHandle_t context, dwSALHandle_t hal)
  {
    m_sdk = context;
    m_hal = hal;
    m_cameraRun = false;
  }
  bool SensorCamera::start(dwSensorParams paramsClient)
  {
    dwStatus status;
    //------------------------------------------------------------------------------
    // initializes cameras
    // -----------------------------------------
    {
      status = dwSAL_createSensor(&m_camera[0], paramsClient, m_hal);
      if(status != DW_SUCCESS) {
        ROS_ERROR("Cannot create sensor %s with %s. Error: %s", paramsClient.protocol, paramsClient.parameters, dwGetStatusName(status));
        return false; 
      }
    }
    dwImageProperties imageProperties{};  
    status = dwSensorCamera_getImageProperties(&imageProperties, DW_CAMERA_OUTPUT_NATIVE_RAW, m_camera[0]);
    if(status != DW_SUCCESS) {
      ROS_ERROR("Error");
      dwSAL_releaseSensor(m_camera[0]);
      return false;
    }
    imageProperties.format = DW_IMAGE_FORMAT_RAW_UINT16;

    // set up the software ISP (SoftISP) to demosaic the RAW frames into RCB
    m_ispOutput = DW_SOFTISP_PROCESS_TYPE_DEMOSAIC;
    dwSoftISPParams softISPParams;
    CHECK_DW_ERROR(dwSoftISP_initParamsFromCamera(&softISPParams, &imageProperties));
    CHECK_DW_ERROR(dwSoftISP_initialize(&m_isp, &softISPParams, m_sdk));

    if (m_ispOutput & DW_SOFTISP_PROCESS_TYPE_DEMOSAIC) {
        dwSoftISP_setDemosaicMethod(DW_SOFTISP_DEMOSAIC_METHOD_INTERPOLATION, m_isp);
    }


    //------------------------------------------------------------------------------
    // allocate memory for the demosaiced (RCB) image and bind it as the ISP output
    //------------------------------------------------------------------------------
    dwImageProperties rcbProperties{};
    if (m_ispOutput & DW_SOFTISP_PROCESS_TYPE_DEMOSAIC) {
        // getting the properties directly from the ISP
        CHECK_DW_ERROR(dwSoftISP_getDemosaicImageProperties(&rcbProperties, m_isp));
        CHECK_DW_ERROR(dwImage_create(&m_rcbImage, rcbProperties, m_sdk));
        CHECK_DW_ERROR(dwImage_getCUDA(&m_rcbCUDAImage, m_rcbImage));
        // bind the image as the output for demosaic process to the ISP, will be filled at the call of
        // dwSoftISP_processDeviceAsync
        CHECK_DW_ERROR(dwSoftISP_bindOutputDemosaic(m_rcbCUDAImage, m_isp));
    }
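    // NOTE (illustrative sketch, not part of the original node): the demosaic output bound
    // above is only filled when the SoftISP is actually run on each captured raw frame,
    // roughly as in the referenced simple_camera_gmsl_raw sample:
    //
    //   dwImageCUDA* rawCUDA = nullptr;
    //   CHECK_DW_ERROR(dwImage_getCUDA(&rawCUDA, rawCudaHandle));  // rawCudaHandle: raw frame streamed to CUDA
    //   CHECK_DW_ERROR(dwSoftISP_bindInputRaw(rawCUDA, m_isp));
    //   CHECK_DW_ERROR(dwSoftISP_processDeviceAsync(m_isp));       // exact signature: see SoftISP.h of the installed release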

    //m_screenshot.reset(new ScreenshotHelper(m_sdk, m_sal, getWindowWidth(), getWindowHeight(), "CameraGMSL_Raw"));

    // create an image with the raw properties to hold the captured frames for streaming
    status = dwImage_create(&m_rawFrame[0], imageProperties, m_sdk);
    if(status != DW_SUCCESS) {
      ROS_ERROR("Error");

      dwSAL_releaseSensor(m_camera[0]);

      return false;
    }
    // no tonemapped image is prepared here; the raw frames are streamed as-is

    // setup streamer for frame grabbing
    status = dwImageStreamer_initialize(&m_streamerNvmediaToCpuProcessed[0], &imageProperties, DW_IMAGE_CPU, m_sdk);
    if(status != DW_SUCCESS) {
      ROS_ERROR("Error");

      dwImage_destroy(m_rawFrame[0]);
      dwSAL_releaseSensor(m_camera[0]);

      return false;
    }

    status = dwSensor_start(m_camera[0]);
    if(status != DW_SUCCESS) {
      ROS_ERROR("Cannot start camera. Error: %s", dwGetStatusName(status));

      dwSAL_releaseSensor(m_camera[0]);

      return false;
    }

    m_cameraThread   = std::thread(&SensorCamera::run_camera, this);
    m_cameraRun = true;

    return true;
  }

  bool SensorCamera::stop()
  {
    if(!m_cameraRun) {
      ROS_WARN("CAMERA sensor not running");
      return false;
    }

    m_cameraRun = false;

    if(m_cameraThread.joinable())
      m_cameraThread.join();
    if (m_streamerNvmediaToCpuProcessed[0]) {
      dwImageStreamer_release(m_streamerNvmediaToCpuProcessed[0]);
    }
    if (m_rawFrame[0]) {
      dwImage_destroy(m_rawFrame[0]);
    } 
    if (m_camera[0]) {
      dwSAL_releaseSensor(m_camera[0]);
    }
    return true;
  }
  void SensorCamera::run_camera()
  {
    while(m_cameraRun) {
      dwCameraFrameHandle_t frame;
      uint32_t sibling = 0;
      //dwStatus status = dwSensorCamera_readFrameNew(&frame, 33333, m_camera[0]);
      dwStatus status = dwSensorCamera_readFrame(&frame, sibling, 33333, m_camera[0]);
      if (status == DW_END_OF_STREAM) {
        ROS_WARN("camera sensor end of stream reached.");
        break;
      } else if (status == DW_TIME_OUT) {
        ROS_WARN("camera sensor readFrame timed-out.");
        continue;
      } else if (status == DW_NOT_READY) {
        ROS_WARN("camera sensor not ready.");
        continue;
      } 
      if (status != DW_SUCCESS) {
        ROS_ERROR("camera sensor readFrame failed. Error: %s", dwGetStatusName(status));
        break;
      } else {
        ROS_INFO("camera sensor readFrame success.");
        dwImageHandle_t raw_img;
        dwCameraOutputType outputTypeRaw = DW_CAMERA_OUTPUT_NATIVE_RAW;
        status = dwSensorCamera_getImage(&raw_img, outputTypeRaw, frame);
        if(status != DW_SUCCESS) {
          ROS_ERROR("Error");
          break;
        }
        status = dwImageStreamer_producerSend(raw_img, m_streamerNvmediaToCpuProcessed[0]);
        if(status != DW_SUCCESS) {
          ROS_ERROR("Error");
          break;
        }
        // receive the streamed image as a handle
        dwImageHandle_t cpuFrameraw;
        status = dwImageStreamer_consumerReceive(&cpuFrameraw, 33000, m_streamerNvmediaToCpuProcessed[0]);
        if(status != DW_SUCCESS) {
          ROS_ERROR("Error");
          break;
        }
        dwImageProperties prop;
        status = dwImage_getProperties(&prop, cpuFrameraw);
        if (status != DW_SUCCESS) {
          ROS_ERROR("dwImage_getProperties() failed. Error: %s", dwGetStatusName(status));
          break;
        }
        dwImageCPU *imgCPUraw;
        status = dwImage_getCPU(&imgCPUraw, cpuFrameraw);
        if (status != DW_SUCCESS) {
          ROS_ERROR("dwImage_getCPU() failed. Error: %s", dwGetStatusName(status));
          break;
        }
        static unsigned int pair_id = 0; // static so header.seq increments across frames
        ImagePtr imageRaw(new Image);
        dwTime_t timestamp;
        dwImage_getTimestamp(&timestamp, raw_img);
        imageRaw->header.stamp.sec = (timestamp / 1000000L);
        imageRaw->header.stamp.nsec = (timestamp % 1000000L) * 1000;
        ROS_DEBUG("timestamp:  %u.%u", imageRaw->header.stamp.sec, imageRaw->header.stamp.nsec);
        imageRaw->header.seq = pair_id;
        pair_id++;
        imageRaw->header.frame_id = "camera";
        fillImage(*imageRaw, sensor_msgs::image_encodings::RGB8, prop.height, prop.width, 4 * prop.width, imgCPUraw->data[0]);
        dwImageStreamer_consumerReturn(&cpuFrameraw, m_streamerNvmediaToCpuProcessed[0]);
        dwImageStreamer_producerReturn(nullptr, 33000, m_streamerNvmediaToCpuProcessed[0]);
        dwSensorCamera_returnFrame(&frame);
        m_cameraRawPub.publish(imageRaw);
      }
    }
  }
}

Dear @s-s.radeshwar,
No. You can get the RAW image from the Camera GMSL Raw sample.
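
For reference, the per-frame raw capture path in that sample looks roughly like the sketch below. This is a minimal sketch, assuming the DriveWorks 2.x SoftISP API used by the sample: the member names m_camera, m_isp and m_rcbCUDAImage mirror the code above, m_streamerCameraToCUDA is a hypothetical NvMedia-to-CUDA streamer the node would additionally need, error handling is reduced to CHECK_DW_ERROR, and the exact SoftISP call signatures should be checked against the installed release. The SoftISP runs on CUDA, which is why the raw frame is streamed to a CUDA image before demosaicing.

// Per-frame RAW -> RCB path, modeled on the Camera GMSL Raw sample (sketch only).
dwCameraFrameHandle_t frame;
CHECK_DW_ERROR(dwSensorCamera_readFrame(&frame, 0, 33333, m_camera[0]));

// fetch the unprocessed RCCB/Bayer frame from the camera
dwImageHandle_t rawImage;
CHECK_DW_ERROR(dwSensorCamera_getImage(&rawImage, DW_CAMERA_OUTPUT_NATIVE_RAW, frame));

// stream the NvMedia raw frame to CUDA so the software ISP can consume it
// (m_streamerCameraToCUDA: a dwImageStreamer initialized with the raw properties and DW_IMAGE_CUDA)
CHECK_DW_ERROR(dwImageStreamer_producerSend(rawImage, m_streamerCameraToCUDA));
dwImageHandle_t rawCudaHandle;
CHECK_DW_ERROR(dwImageStreamer_consumerReceive(&rawCudaHandle, 33000, m_streamerCameraToCUDA));

// bind the CUDA raw image as ISP input and run the demosaic pass;
// this fills the RCB image previously bound with dwSoftISP_bindOutputDemosaic()
dwImageCUDA* rawCUDA = nullptr;
CHECK_DW_ERROR(dwImage_getCUDA(&rawCUDA, rawCudaHandle));
CHECK_DW_ERROR(dwSoftISP_bindInputRaw(rawCUDA, m_isp));
CHECK_DW_ERROR(dwSoftISP_processDeviceAsync(m_isp));

// m_rcbCUDAImage now holds the demosaiced RCB planes; this image (not the untouched raw
// frame) is what should be streamed to the CPU and copied into the ROS image message.

// return the streamed image and the camera frame
CHECK_DW_ERROR(dwImageStreamer_consumerReturn(&rawCudaHandle, m_streamerCameraToCUDA));
CHECK_DW_ERROR(dwImageStreamer_producerReturn(nullptr, 33000, m_streamerCameraToCUDA));
CHECK_DW_ERROR(dwSensorCamera_returnFrame(&frame));

The key difference from the node above is that the ISP is actually run on every frame and its RCB output is published, rather than reinterpreting the raw sensor data as an RGB8 image.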