Send camera data to other device over IPC

Hi, my goal is to send camera data from the DDPX over ethernet to another device.

My plan is to send the camera stream over IPC and have used the sample code to put this together.
Now I’m at the point where I need to pass the image stream to the IPC send function.
Is this the right way of implementing what I’m trying to achieve?

And which datatype should be passed to the IPC send function?

Thanks in advance.

Dear omid_volvo,
It is not clear which IPC function you are asking about. Are you referring to the CUDA IPC functions?

No I’m using the Driveworks IPC. The plan is to have a buffer or something that is initialized in the main function that can be passed to the runServer function to access the buffer and send it to a client (not implemented yet). Check my code below:

#include <dw/core/Context.h>
#include <dw/core/VersionCurrent.h>
#include <dw/ipc/SocketClientServer.h>

#include "../framework/Log.hpp"
#include "../framework/Checks.hpp"

#include <signal.h>

#include <iostream>
#include <thread>

#include <dw/core/Logger.h>
#include <dw/core/VersionCurrent.h>
#include <dw/core/NvMedia.h>

// HAL
#include <dw/sensors/Sensors.h>
#include <dw/sensors/SensorSerializer.h>
#include <dw/sensors/camera/Camera.h>

// Image
#include <dw/image/ImageStreamer.h>

// Renderer
#include <dw/visualization/Renderer.h>

//------------------------------------------------------------------------------
// Variables of working/debugging status of the program.
//------------------------------------------------------------------------------
// BUGFIX: `volatile bool` is not guaranteed to be safe to write from a signal
// handler; `volatile sig_atomic_t` (from <signal.h>, already included) is the
// only type POSIX guarantees for that purpose.  Implicit bool conversions keep
// every existing `g_run = true/false` and `while (g_run)` use working.
static volatile sig_atomic_t g_run = 1;

//------------------------------------------------------------------------------
// Method declarations
//------------------------------------------------------------------------------
// Signal handler (installed in main for SIGHUP/SIGINT/SIGQUIT/SIGABRT/SIGTERM):
// clears the global run flag so the capture/server loops exit cleanly.
void sig_int_handler(int)
{
    g_run = false;
}

// DriveWorks logger sink: forwards every SDK log message verbatim to stdout
// and flushes immediately so no output is lost if the process dies.
void loggerCallback(dwContextHandle_t context, dwLoggerVerbosity type,
  const char* msg)
{
    (void)context; // unused — signature dictated by the SDK callback type
    (void)type;
    fputs(msg, stdout);
    fflush(stdout);
}


//------------------------------------------------------------------------------
// Simple echo server over DriveWorks sockets.
//
// Accepts one read and one write connection (two for illustration only; a
// single connection can also be used bi-directionally), then echoes a size_t
// payload back to the peer until the stream ends or g_run is cleared by a
// signal.  The "SEND CAMERA DATA HERE" marker is where serialized camera
// bytes would be written to socketConnectionWrite instead of the echo.
//
// @param ctx  initialized DriveWorks context (owned by the caller).
// @return DW_SUCCESS on a clean shutdown, otherwise the first error observed.
//------------------------------------------------------------------------------
dwStatus runServer(dwContextHandle_t ctx)
{
    auto status = DW_FAILURE;

    auto port = static_cast<uint16_t>(49252);

    auto socketServer = dwSocketServerHandle_t{DW_NULL_HANDLE};
    CHECK_DW_ERROR(dwSocketServer_initialize(&socketServer, port, 2, ctx));

    auto socketConnectionRead  = dwSocketConnectionHandle_t{DW_NULL_HANDLE},
         socketConnectionWrite = dwSocketConnectionHandle_t{DW_NULL_HANDLE};
    do {
        status = dwSocketServer_accept(&socketConnectionRead, 10000, socketServer);
    } while (g_run && status == DW_TIME_OUT); // BUGFIX: honor Ctrl-C while waiting
    if (status == DW_SUCCESS) {
        do {
            status = dwSocketServer_accept(&socketConnectionWrite, 10000, socketServer);
        } while (g_run && status == DW_TIME_OUT);
    }

    // BUGFIX: the original tested `status != DW_FAILURE`, which also let
    // DW_TIME_OUT and any other non-success code fall through into the loop.
    if (status == DW_SUCCESS) {
        while (g_run) {
            size_t data;
            auto size = sizeof(data);

            // receive data (BUGFIX: dropped the sample's random sleep_for
            // calls — debugging filler adding up to ~1 s latency per echo)
            if ((status = dwSocketConnection_recv(reinterpret_cast<uint8_t *>(&data), &size,
                                                  socketConnectionRead)) == DW_END_OF_STREAM) {
                status = DW_SUCCESS; // peer closed the stream: normal shutdown
                break;
            }
            CHECK_DW_ERROR(status);

            if (size != sizeof(data)) // short read: stop rather than echo garbage
                break;

            std::cout << "Socket Server received " << data << std::endl;

            // SEND CAMERA DATA HERE
            // send data back
            if ((status = dwSocketConnection_send(reinterpret_cast<uint8_t *>(&data), &size,
                                                  socketConnectionWrite)) == DW_END_OF_STREAM) {
                status = DW_SUCCESS;
                break;
            }
            CHECK_DW_ERROR(status);

            if (size != sizeof(data)) // short write
                break;

            std::cout << "Socket Server send " << data << std::endl;
        }
    }

    // Release in reverse order of creation; guard the connection handles,
    // which stay DW_NULL_HANDLE when accept never succeeded (BUGFIX).
    if (socketConnectionWrite)
        CHECK_DW_ERROR(dwSocketConnection_release(socketConnectionWrite));
    if (socketConnectionRead)
        CHECK_DW_ERROR(dwSocketConnection_release(socketConnectionRead));
    CHECK_DW_ERROR(dwSocketServer_release(socketServer));

    return status; // BUGFIX: was an unconditional DW_SUCCESS
}

//------------------------------------------------------------------------------
// Captures frames from a GMSL camera and feeds each one to an H.264 disk
// serializer until g_run is cleared by a signal.
//
// @param m_sdk  initialized DriveWorks context, owned by the caller.
//               BUGFIX: the original re-initialized the logger and a *second*
//               context into this parameter (leaking it) and then released
//               it, while main releases its own context as well.  This
//               function now uses — and never releases — the caller's context.
// @return DW_SUCCESS on a clean shutdown, otherwise the first error observed.
//------------------------------------------------------------------------------
dwStatus get_camera(dwContextHandle_t m_sdk)
{
    // handles
    dwSALHandle_t m_sal = DW_NULL_HANDLE;
    dwSensorSerializerHandle_t m_serializer = DW_NULL_HANDLE; // BUGFIX: was uninitialized

    // Initialize SAL on the caller's context.
    CHECK_DW_ERROR(dwSAL_initialize(&m_sal, m_sdk));

    // Create and start the camera sensor.
    dwSensorHandle_t m_camera = DW_NULL_HANDLE;
    dwSensorParams params{}; // BUGFIX: zero the struct so unset fields are not garbage
    params.parameters = "output-format=yuv,camera-type=ar0231-rccb-bae-sf3324,camera-group=a";
    params.protocol = "camera.gmsl";
    CHECK_DW_ERROR(dwSAL_createSensor(&m_camera, params, m_sal));
    CHECK_DW_ERROR(dwSensor_start(m_camera));

    // Configure an H.264 serializer that writes to disk.
    std::string newParams;
    newParams += "format=h264";
    newParams += ",bitrate=8000000";
    newParams += ",framerate=30";
    newParams += ",type=disk,file=/home/nvidia/ddpx.h264";
    newParams += ",slave=0";

    dwSerializerParams serializerParams{};
    serializerParams.parameters = newParams.c_str(); // newParams outlives the serializer setup
    serializerParams.onData = nullptr;

    CHECK_DW_ERROR(dwSensorSerializer_initialize(&m_serializer, &serializerParams, m_camera));
    CHECK_DW_ERROR(dwSensorSerializer_start(m_serializer));

    dwStatus status = DW_SUCCESS;
    dwCameraFrameHandle_t frame = DW_NULL_HANDLE;
    const dwTime_t timeout = 33000; // ~one frame period at 30 fps, in microseconds

    // BUGFIX: the original `while (loop)` spun forever; honor the signal flag.
    while (g_run)
    {
        status = dwSensorCamera_readFrame(&frame, 0, timeout, m_camera);
        if (status == DW_NOT_READY || status == DW_TIME_OUT)
            continue; // no frame yet — poll again
        if (status != DW_SUCCESS)
            break;    // hard error — stop capturing

        // Get an image with the desired output format and serialize it.
        dwImageHandle_t image = DW_NULL_HANDLE;
        CHECK_DW_ERROR(dwSensorCamera_getImage(&image, DW_CAMERA_OUTPUT_NATIVE_PROCESSED, frame));
        //dwSensorSerializer_serializeCameraFrameAsync(frame, m_serializer);
        CHECK_DW_ERROR(dwSensorSerializer_serializeImageAsync(image, m_serializer));

        CHECK_DW_ERROR(dwSensorCamera_returnFrame(&frame));
    }

    // Release used objects in reverse order of creation
    // (BUGFIX: the serializer was never stopped or released).
    if (m_serializer)
    {
        CHECK_DW_ERROR(dwSensorSerializer_stop(m_serializer));
        CHECK_DW_ERROR(dwSensorSerializer_release(m_serializer));
    }
    if (m_camera)
    {
        CHECK_DW_ERROR(dwSensor_stop(m_camera));
        CHECK_DW_ERROR(dwSAL_releaseSensor(m_camera));
    }
    CHECK_DW_ERROR(dwSAL_release(m_sal));

    std::cout << "*** Program exiting ***\n"
              << std::endl;

    // A timeout/not-ready right before shutdown is a normal exit, not an error.
    return (status == DW_NOT_READY || status == DW_TIME_OUT) ? DW_SUCCESS : status;
}

int main(int argc, const char **argv)
{
    (void)argv;
    (void)argc;
    
    #if (!WINDOWS)
    struct sigaction action = {};
    action.sa_handler       = sig_int_handler;

    sigaction(SIGHUP, &action, NULL);  // controlling terminal closed, Ctrl-D
    sigaction(SIGINT, &action, NULL);  // Ctrl-C
    sigaction(SIGQUIT, &action, NULL); // Ctrl-\, clean quit with core dump
    sigaction(SIGABRT, &action, NULL); // abort() called.
    sigaction(SIGTERM, &action, NULL); // kill command
    sigaction(SIGSTOP, &action, NULL); // kill command
    #endif
    g_run = true;

    // Initialize context
    auto ctx = dwContextHandle_t{DW_NULL_HANDLE};
    {
        CHECK_DW_ERROR(dwLogger_initialize(loggerCallback));
        CHECK_DW_ERROR(dwLogger_setLogLevel(DW_LOG_VERBOSE));
        CHECK_DW_ERROR(dwInitialize(&ctx, DW_VERSION, nullptr));
    }

    get_camera(ctx);
    //runServer(ctx);

    CHECK_DW_ERROR(dwRelease(ctx));

    return 0;
}

Dear omid_volvo,
Did you consider sending the image data as an array? You could convert the image to a CUDA RGBA array on the server side and send it to the client side for processing.

Hi again,

We found out we can’t send using IPC, but how would you recommend me to send each frame over ethernet?
For example send each frame using gstreamer. Do you have any code examples?

Thanks in advance.

Dear omid_volvo,
Unfortunately GStreamer is not supported on the Drive platform. May I know what your use case is, and why you are looking for this functionality?

The goal is to capture frames from the camera and send the lraw frames over Ethernet to a logging device.

Is there a sample where I can access the buffer for the serialized camera frame? So that I can send the buffer containing the lraw data for a frame. I have created the sending part, just need to access the buffer now

I found out by checking the SensorSerializer.h and the posts below.
By providing type=user and pointing serializerParams.onData to the serializer callback function, you can access the buffer with the serialized data.

https://devtalk.nvidia.com/default/topic/1044546/general/can-data-in-asc-format-/post/5299849/#5299849
https://devtalk.nvidia.com/default/topic/1003201/general/how-to-get-gmsl-sensor-yuv-data/