Make error hidden symbol `cudaMemcpyAsync'

[ 33%] Linking CXX executable simple_example
/usr/bin/ld: simple_example: hidden symbol `cudaMemcpyAsync’ in /usr/local/cuda-10.0/lib64/libcudart_static.a(libcudart_static.a.o) is referenced by DSO
/usr/bin/ld: final link failed: Bad value
collect2: error: ld returned 1 exit status
CMakeFiles/simple_example.dir/build.make:143: recipe for target ‘simple_example’ failed
make[2]: *** [simple_example] Error 1
CMakeFiles/Makefile2:67: recipe for target ‘CMakeFiles/simple_example.dir/all’ failed
make[1]: *** [CMakeFiles/simple_example.dir/all] Error 2
Makefile:83: recipe for target ‘all’ failed
make: *** [all] Error 2

Here is the cmakelist.
cmake_minimum_required(VERSION 2.8)
project(simple_example)
set(CMAKE_BUILD_TYPE Release)
set(SOURCE_FILES tesorrt_drnet.cpp)
include_directories(/usr/src/tensorrt/samples/common/)
set(CUDA_NVCC_FLAGS {CUDA_NVCC_FLAGS}; -O3 -gencode arch=compute_53,code=sm_53 -gencode arch=compute_62,code=sm_62 ) find_package(OpenCV REQUIRED) include_directories( {OPENCV_INCLUDE_DIR} )

CUDA Configuration

find_package(CUDA REQUIRED)

set(CUDA_VERBOSE_BUILD ON)

Specify the cuda host compiler to use the same compiler as cmake.

set(CUDA_HOST_COMPILER ${CMAKE_CXX_COMPILER})

TensorRT

find_path(TENSORRT_INCLUDE_DIR NvInfer.h
HINTS {TENSORRT_ROOT} {CUDA_TOOLKIT_ROOT_DIR}
PATH_SUFFIXES include)
MESSAGE(STATUS “Found TensorRT headers at {TENSORRT_INCLUDE_DIR}") find_library(TENSORRT_LIBRARY_INFER nvinfer HINTS {TENSORRT_ROOT} {TENSORRT_BUILD} {CUDA_TOOLKIT_ROOT_DIR}
PATH_SUFFIXES lib lib64 lib/x64)
find_library(TENSORRT_LIBRARY_INFER_PLUGIN nvinfer_plugin
HINTS {TENSORRT_ROOT} {TENSORRT_BUILD} {CUDA_TOOLKIT_ROOT_DIR} PATH_SUFFIXES lib lib64 lib/x64) find_library(TENSORRT_LIBRARY_PARSER nvparsers HINTS {TENSORRT_ROOT} {TENSORRT_BUILD} {CUDA_TOOLKIT_ROOT_DIR}
PATH_SUFFIXES lib lib64 lib/x64)
set(TENSORRT_LIBRARY {TENSORRT_LIBRARY_INFER} {TENSORRT_LIBRARY_INFER_PLUGIN} {TENSORRT_LIBRARY_PARSER}) MESSAGE(STATUS "Find TensorRT libs at {TENSORRT_LIBRARY}”)
find_package_handle_standard_args(
TENSORRT DEFAULT_MSG TENSORRT_INCLUDE_DIR TENSORRT_LIBRARY)
if(NOT TENSORRT_FOUND)
message(ERROR “Cannot find TensorRT library.”)
endif()

if (NVINTERNAL)
set(ONNX_INCLUDE_DIR {PROJECT_SOURCE_DIR}/parsers{NVINTERNAL_SUFFIX}/onnx CACHE STRING “Onnx include directory”)
else()
set(ONNX_INCLUDE_DIR {PROJECT_SOURCE_DIR}/parsers{NVINTERNAL_SUFFIX}/onnx CACHE STRING “Onnx include directory”)
endif()

set(SAMPLE_DEP_LIBS

nvinfer

nvonnxparser

)

set(CMAKE_CXX_FLAGS “${CMAKE_CXX_FLAGS} -std=c++11 -Wall -Ofast -Wfatal-errors -D_MWAITXINTRIN_H_INCLUDED”) # -std=gnu++11

list(APPEND CUDA_NVCC_FLAGS “-D_FORCE_INLINES -Xcompiler -fPIC”)

CUDA_INCLUDE_DIRECTORIES({CUDNN_INCLUDE_DIR} {TENSORRT_INCLUDE_DIR} {ONNX_INCLUDE_DIR} {CUDA_INCLUDE_DIRS})
CUDA_ADD_EXECUTABLE(simple_example tesorrt_drnet.cpp /usr/src/tensorrt/samples/common/logger.cpp)

target_link_libraries(simple_example
${SAMPLE_DEP_LIBS}
-Wl,–unresolved-symbols=ignore-in-shared-libs
)

target_link_libraries(simple_example {TENSORRT_LIBRARY} {OpenCV_LIBS} ${CUDA_LIBRARIES} /usr/local/cuda/lib64/libcudart.so)

can you help me fix it?

Hi,

The error indicates that the CUDA doesn’t be linked correctly.
Please use cuda_add_library rather target_link_libraries in the CMakeLists.txt.

find_package(CUDA)
cuda_add_library(simple_example SHARED)
...

Thanks.

But I have run it on the computer with tensorrt 6.0. It is sucessful. cuda_add_library can generate the .so files. But when using CUDA_ADD_EXECUTABLE, always make errors.

Hi,

Would you mind to share the complete simple_example with us?
So we can check the source directly?

Thanks.

Hi mzh0317,

Is this still an issue to support? Any result can be shared?