Argus example cmake compilation issue

Hi Folks,

I am looking to modify an existing Argus example (copied from argus/samples/denoise). Could someone please point out my mistake in the steps below?

Please note that I am not trying to mix things up with the existing argus/samples area. I am able to build from argus/build/samples/denoise perfectly fine.

Thanks

  1. cd ~; mkdir argus_example1 ; cd argus_example1
  2. cp ~/Downloads/argus/samples/denoise/* .
  3. mkdir build ; cd build
  4. ubuntu@tegra-ubuntu:~/work/exp/argus-capture1/build$ cmake -DCMAKE_MODULE_PATH=/home/ubuntu/tegra_multimedia_api/argus …
    CMake Error at CMakeLists.txt:33 (find_package):
    By not providing “FindArgus.cmake” in CMAKE_MODULE_PATH this project has
    asked CMake to find a package configuration file provided by “Argus”, but
    CMake did not find one.

Could not find a package configuration file provided by “Argus” with any of
the following names:

ArgusConfig.cmake
argus-config.cmake

Add the installation prefix of “Argus” to CMAKE_PREFIX_PATH or set
“Argus_DIR” to a directory containing one of the above files. If “Argus”
provides a separate development package or SDK, be sure it has been
installed.

– Configuring incomplete, errors occurred!

Hi dumbogeorge
Could you move the denoise sample into tegra_multimedia_api/argus/samples/ and try building it from there?

Hi ShaneCCC,
I am not able to build existing samples in tegra_multimedia_api/argus/samples area.
Thanks

ubuntu@tegra-ubuntu:~/tegra_multimedia_api/argus/samples/denoise/build$ cmake ..
-- The C compiler identification is GNU 5.4.0
-- The CXX compiler identification is GNU 5.4.0
-- Check for working C compiler: /usr/bin/cc
-- Check for working C compiler: /usr/bin/cc -- works
-- Detecting C compiler ABI info
-- Detecting C compiler ABI info - done
-- Detecting C compile features
-- Detecting C compile features - done
-- Check for working CXX compiler: /usr/bin/c++
-- Check for working CXX compiler: /usr/bin/c++ -- works
-- Detecting CXX compiler ABI info
-- Detecting CXX compiler ABI info - done
-- Detecting CXX compile features
-- Detecting CXX compile features - done
CMake Error at CMakeLists.txt:33 (find_package):
  By not providing "FindArgus.cmake" in CMAKE_MODULE_PATH this project has
  asked CMake to find a package configuration file provided by "Argus", but
  CMake did not find one.

  Could not find a package configuration file provided by "Argus" with any of
  the following names:

    ArgusConfig.cmake
    argus-config.cmake

  Add the installation prefix of "Argus" to CMAKE_PREFIX_PATH or set
  "Argus_DIR" to a directory containing one of the above files.  If "Argus"
  provides a separate development package or SDK, be sure it has been
  installed.


-- Configuring incomplete, errors occurred!
See also "/home/ubuntu/tegra_multimedia_api/argus/samples/denoise/build/CMakeFiles/CMakeOutput.log".

I am looking to modify an existing sample from /home/ubuntu/Downloads/argus/samples area. I am using functions from /home/ubuntu/tegra_multimedia_api/include/nvbuf_utils.h, and would like to include that file. However it seems that /home/ubuntu/tegra_multimedia_api/include/ is not in the search path.

Is there a way I can add /home/ubuntu/tegra_multimedia_api/include/ to header search path(s) ? Which (in /home/ubuntu/Downloads/argus/build/ area ) CMakeLists.txt should I edit ?

Thanks

Folks,

In order to map the frame read out of the cameras to a cv::Mat, I am using the NvBufferGetParams function. I am having a hard time getting it to compile, and I have been unable to find the source of NvBufferGetParams. Could you please help?

I modified (~/Downloads/argus/samples/gstVideoEncode/CMakeLists.txt) gstVideoEncode example. Because we need to record and processing frames in openCV at the same time.

cmake_minimum_required (VERSION 2.6)

project(argus_gstvideoencode)

# Make the Argus find-modules (FindArgus.cmake etc.) discoverable.
set(CMAKE_MODULE_PATH "${CMAKE_SOURCE_DIR}/cmake" "${CMAKE_MODULE_PATH}")

# BUG FIX: "-std=c++11" was previously appended to CMAKE_C_FLAGS, where it
# never applies to the C++ sources (and can break C compiles). It belongs on
# the C++ flags; append rather than overwrite so user-supplied flags survive.
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11")


find_package(Argus REQUIRED)

set(SOURCES
    main.cpp
    )

include_directories(
    ${ARGUS_INCLUDE_DIR}
    ${EGL_INCLUDE_DIR}
    ${OPENGLES_INCLUDE_DIR}
    ${CMAKE_SOURCE_DIR}/samples/utils
    /home/ubuntu/tegra_multimedia_api/include/
    )

# BUG FIX: link_directories() only affects targets created AFTER it is
# called, so it must appear before add_executable() to have any effect.
link_directories(
    ${GTK3_LIBRARY_DIRS}
    /usr/lib/aarch64-linux-gnu/tegra
    /usr/local/cuda-8.0/lib64
    )

add_executable(${PROJECT_NAME} ${SOURCES})

# Begin of gstreamer
find_package(PkgConfig REQUIRED)

pkg_check_modules(GSTREAMER REQUIRED gstreamer-1.0)
include_directories(${GSTREAMER_INCLUDE_DIRS})
target_link_libraries(${PROJECT_NAME} ${GSTREAMER_LIBRARIES})

pkg_check_modules(GLIB REQUIRED glib-2.0)
include_directories(${GLIB_INCLUDE_DIRS})
target_link_libraries(${PROJECT_NAME} ${GLIB_LIBRARIES})
# End of gstreamer

# BUG FIX for "undefined reference to 'NvBufferGetParams' ... DSO missing
# from command line": the previous add_library(libnvbuf_utils.so SHARED IMPORTED)
# declared an imported target with no IMPORTED_LOCATION and never linked it,
# so the linker was never handed libnvbuf_utils.so. Declare the imported
# target properly and link it into the executable.
add_library(nvbuf_utils SHARED IMPORTED)
set_target_properties(nvbuf_utils PROPERTIES
    IMPORTED_LOCATION "/usr/lib/aarch64-linux-gnu/tegra/libnvbuf_utils.so")

target_link_libraries(${PROJECT_NAME}
    ${ARGUS_LIBRARIES}
    argussampleutils
    nvbuf_utils
    )

install(TARGETS ${PROJECT_NAME} DESTINATION bin)

include(CPack)

The listing above is what my CMakeLists.txt looks like.

ubuntu@tegra-ubuntu:~/Downloads/argus/build/samples/gstVideoEncode$ make
-- Configuring done
-- Generating done
-- Build files have been written to: /home/ubuntu/Downloads/argus/build
[ 80%] Built target argussampleutils
[ 86%] Building CXX object samples/gstVideoEncode/CMakeFiles/argus_gstvideoencode.dir/main.cpp.o
[ 93%] Linking CXX executable argus_gstvideoencode
/usr/bin/ld: /usr/lib/aarch64-linux-gnu/tegra/libargus_socketclient.so: undefined reference to symbol 'NvBufferGetParams'
/usr/lib/aarch64-linux-gnu/tegra/libnvbuf_utils.so.1.0.0: error adding symbols: DSO missing from command line
collect2: error: ld returned 1 exit status
samples/gstVideoEncode/CMakeFiles/argus_gstvideoencode.dir/build.make:106: recipe for target 'samples/gstVideoEncode/argus_gstvideoencode' failed
make[2]: *** [samples/gstVideoEncode/argus_gstvideoencode] Error 1
CMakeFiles/Makefile2:979: recipe for target 'samples/gstVideoEncode/CMakeFiles/argus_gstvideoencode.dir/all' failed
make[1]: *** [samples/gstVideoEncode/CMakeFiles/argus_gstvideoencode.dir/all] Error 2
Makefile:138: recipe for target 'all' failed
make: *** [all] Error 2

Hi dumbogeorge
I think the tegra_multimedia_api/samples/11_camera_object_identification/ should much better for your reference.

Hi ShaneCCC,

I am following 11_camera_object_identification example. I am looking to get pointer to the frame buffer.

In the code below is there a way we can go directly from “fd” to the pointer to the frame buffer ?

// Acquire a frame.
        UniqueObj<Frame> frame(iFrameConsumer->acquireFrame());
        IFrame *iFrame = interface_cast<IFrame>(frame);
        if (!iFrame)
            break;

        // Get the IImageNativeBuffer extension interface and create the fd.
        NV::IImageNativeBuffer *iNativeBuffer =
            interface_cast<NV::IImageNativeBuffer>(iFrame->getImage());
        if (!iNativeBuffer)
            ORIGINATE_ERROR("IImageNativeBuffer not supported by Image.");
        fd = iNativeBuffer->createNvBuffer(Size(ctx.width, ctx.height),
                                           NvBufferColorFormat_YUV420,
                                           NvBufferLayout_BlockLinear);

I was given a patch (for camera reading) using 11_camera_object_identification (https://devtalk.nvidia.com/default/topic/1010111/jetson-tx1/nvmm-memory/3) by DaneLLL. I tried that and it works.

I am trying to understand the following; please help with your explanation.

  1. Cameras are set to produce YUV_420_888
// Create the OutputStream.
    PRODUCER_PRINT("Creating output stream\n");
    UniqueObj<OutputStreamSettings> streamSettings(iCaptureSession->createOutputStreamSettings());
    IOutputStreamSettings *iStreamSettings = interface_cast<IOutputStreamSettings>(streamSettings);
    if (iStreamSettings)
    {
        iStreamSettings->setPixelFormat(PIXEL_FMT_YCbCr_420_888);
        iStreamSettings->setResolution(Size(p_ctx->width, p_ctx->height));
    }
  2. The converter reads from the camera and converts from RGBA to BGR. I am not able to understand how the camera is giving out RGBA when (in the code above) the output pixel format is set to YUV420.
extern "C" void
opencv_img_processing(void *opencv_handler, uint8_t *pdata, int32_t width, int32_t height)
{
    // Per-frame processing callback: wraps the raw RGBA frame in a cv::Mat,
    // hands a copy to the (asynchronous) detector when it is idle, converts
    // to BGR for display, overlays the FPS, and shows the result.
    struct opencv_sample_handler *handler =
        (struct opencv_sample_handler *) opencv_handler;

    // Zero-copy wrap: imgbuf aliases pdata (4 channels = RGBA), no ownership taken.
    cv::Mat imgbuf = cv::Mat(height, width, CV_8UC4, pdata);
    cv::Mat display_img;

    if (handler->detector_busy == 0) {
        // Deep copy so the detector thread does not race with the camera
        // overwriting pdata on the next frame.
        imgbuf.copyTo(handler->detecting_mat);
        handler->detector_busy = 1;
    }

    struct timeval tp;
    gettimeofday(&tp, NULL);
    long start = tp.tv_sec * 1000 + tp.tv_usec / 1000;
    cvtColor(imgbuf, display_img, CV_RGBA2BGR);
    gettimeofday(&tp, NULL);
    long end = tp.tv_sec * 1000 + tp.tv_usec / 1000;
    handler->milliseconds = end - start;
    if (1/*handler->result_update_flag*/)
    {
        ostringstream ss;
        ss.str("");
        // BUG FIX: when cvtColor finishes in under 1 ms the elapsed-time
        // measurement is 0 and 1000./0 prints "FPS = inf". Clamp the
        // denominator to 1 ms so the overlay stays finite.
        ss << "FPS = " << std::fixed << std::setprecision(0)
             << 1000. / (handler->milliseconds > 0 ? handler->milliseconds : 1);
        matPrint(display_img, 0, CV_RGB(255,0,0), ss.str());
#if 0
        ss.str("");
        ss << std::fixed << std::setprecision(2) << handler->first_prob << " - " << handler->first_result;
        matPrint(display_img, 1, CV_RGB(255,0,0), ss.str());
        ss.str("");
        ss << std::fixed << std::setprecision(2) << handler->second_prob << " - " << handler->second_result;
        matPrint(display_img, 2, CV_RGB(255,0,0), ss.str());
#endif
    }
    gettimeofday(&tp, NULL);
    start = tp.tv_sec * 1000 + tp.tv_usec / 1000;
    cv::imshow("img", display_img);
    waitKey(1);
    gettimeofday(&tp, NULL);
    end = tp.tv_sec * 1000 + tp.tv_usec / 1000;
    cout << "cvtColor took " << handler->milliseconds << "ms, ";
    cout << "imshow took " << end - start << "ms" << endl;
    return;
}

Hi dumbogeorge
I suppose NvVideoConverter does that job.

bool ConsumerThread::createImageConverter()
{
    int ret = 0;

    // YUV420 --> RGB32 converter
    m_ImageConverter = NvVideoConverter::createVideoConverter("conv");
    if (!m_ImageConverter)
        ORIGINATE_ERROR("Could not create m_ImageConverteroder");

    if (DO_STAT)
        m_ImageConverter->enableProfiling();

    m_ImageConverter->capture_plane.
        setDQThreadCallback(converterCapturePlaneDqCallback);
    m_ImageConverter->output_plane.
        setDQThreadCallback(converterOutputPlaneDqCallback);

    ret = m_ImageConverter->setOutputPlaneFormat(V4L2_PIX_FMT_YUV420M, m_pContext->width,
                                    m_pContext->height, V4L2_NV_BUFFER_LAYOUT_BLOCKLINEAR);
    if (ret < 0)
        ORIGINATE_ERROR("Could not set output plane format");

    ret = m_ImageConverter->setCapturePlaneFormat(V4L2_PIX_FMT_ABGR32, m_pContext->width,
                                    m_pContext->height, V4L2_NV_BUFFER_LAYOUT_PITCH);