Hi,
I am trying to load a TensorRT engine (.engine file) in a C++ program. To do that I am trying to find the TensorRT libraries and compile the program with CMake on my Jetson Nano, but I am a little lost and I don't know exactly how to do it. This is my CMakeLists.txt:
cmake_minimum_required(VERSION 3.9)
project(inference_in_trt)
find_package(CUDA)
list(APPEND PLUGINS "nvinfer")
list(APPEND PLUGINS "nvonnxparser")
list(APPEND PLUGINS "nvparsers")
foreach(libName ${PLUGINS})
    find_library(${libName}_lib NAMES ${libName} PATHS "/usr" PATH_SUFFIXES lib)
    list(APPEND PLUGIN_LIBS "${${libName}_lib}")
endforeach()
# Make the project require C++14
include(CheckCXXCompilerFlag)
CHECK_CXX_COMPILER_FLAG("-std=c++14" COMPILER_SUPPORTS_CXX14)
CHECK_CXX_COMPILER_FLAG("-std=c++0x" COMPILER_SUPPORTS_CXX0X)
if(COMPILER_SUPPORTS_CXX14)
    set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++14")
elseif(COMPILER_SUPPORTS_CXX0X)
    set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++0x")
endif()
include_directories(${CUDA_INCLUDE_DIRS})
add_executable(inference trt_inference.cpp)
target_link_libraries(inference ${CUDA_INCLUDE_DIRS})
install(TARGETS inference DESTINATION bin)
How can I fix this undefined reference? This is the error I get when building with CMake:
[ 50%] Building CXX object CMakeFiles/inference.dir/trt_inference.cpp.o
[100%] Linking CXX executable inference
CMakeFiles/inference.dir/trt_inference.cpp.o: In function `nvinfer1::(anonymous namespace)::createInferRuntime(nvinfer1::ILogger&)':
trt_inference.cpp:(.text+0x14): undefined reference to `createInferRuntime_INTERNAL'
collect2: error: ld returned 1 exit status
CMakeFiles/inference.dir/build.make:94: recipe for target 'inference' failed
make[2]: *** [inference] Error 1
CMakeFiles/Makefile2:67: recipe for target 'CMakeFiles/inference.dir/all' failed
make[1]: *** [CMakeFiles/inference.dir/all] Error 2
Makefile:129: recipe for target 'all' failed
make: *** [all] Error 2
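Looking at my CMakeLists.txt again, I notice that the PLUGIN_LIBS list I build with find_library is never actually passed to the linker, and target_link_libraries is given the CUDA include directories instead of libraries. My guess (I am not sure this is the right way) is that the last part should look more like this:

include_directories(${CUDA_INCLUDE_DIRS})
add_executable(inference trt_inference.cpp)
# Link the TensorRT libraries found above, plus the CUDA libraries from find_package(CUDA)
target_link_libraries(inference ${PLUGIN_LIBS} ${CUDA_LIBRARIES})

Is that enough to pull in libnvinfer, or is something else missing?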
My Jetson Nano runs JetPack 4.3, so my TensorRT version is 6. All I want to do is deserialize the .engine file.
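In case it helps, the part of trt_inference.cpp that triggers the error is roughly the following (simplified; the real file has more around it, and the engine path is just an example):

#include <fstream>
#include <iostream>
#include <iterator>
#include <vector>

#include "NvInfer.h"

// Minimal logger that the TensorRT runtime requires
class Logger : public nvinfer1::ILogger
{
    void log(Severity severity, const char* msg) override
    {
        if (severity <= Severity::kWARNING)
            std::cout << msg << std::endl;
    }
} gLogger;

int main()
{
    // Read the serialized engine from disk
    std::ifstream file("model.engine", std::ios::binary);
    std::vector<char> data((std::istreambuf_iterator<char>(file)),
                           std::istreambuf_iterator<char>());

    // Deserialize it with the TensorRT runtime (this is the call that
    // ends up referencing createInferRuntime_INTERNAL)
    nvinfer1::IRuntime* runtime = nvinfer1::createInferRuntime(gLogger);
    nvinfer1::ICudaEngine* engine =
        runtime->deserializeCudaEngine(data.data(), data.size(), nullptr);

    std::cout << (engine ? "Engine loaded" : "Failed to load engine") << std::endl;
    return 0;
}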
Thank you for your time