Hi
I am trying to build a Docker image to set up and use the NVIDIA GStreamer plugins (nvvidconv, nvoverlaysink).
With the Dockerfile below, the NVIDIA GStreamer plugins are not found.
Checking with the gst-inspect command returns “no such element or plugin”.
Please let me know if I am missing anything here.
Do I need to install anything specific to use nvidia gstreamer plugins ?
Dockerfile
# Start from the latest bionic (Ubuntu 18.04) release available at the moment
FROM arm64v8/ubuntu:bionic-20190307
# The JETPACK_URL paths used here comes from the jetson_downloads/repository.json file when you run the regular jetpack installer on an x64 linux host
# Use Jetpack 4.1.1
ARG JETPACK_URL=https://developer.download.nvidia.com/devzone/devcenter/mobile/jetpack_l4t/4.1.1/xddsn.im/JetPackL4T_4.1.1_b57/
# Use apt-get (stable CLI for scripts) and combine update+install in ONE layer:
# a cached stand-alone `apt-get update` layer would otherwise go stale and later
# installs would fetch from an outdated package index (hadolint DL3009/DL3027).
# The package cache is removed in the same layer so it never bloats the image.
RUN apt-get update \
 && apt-get upgrade -y \
 && apt-get install -y \
      apt-utils \
      build-essential \
      cmake \
      curl \
      git \
      libexpat1-dev \
      libgstreamer1.0-dev \
      libgtk-3-dev \
      libjpeg-dev \
      libv4l-dev \
      sudo \
      vim \
 && apt-get autoremove -y \
 && rm -rf /var/lib/apt/lists/*
# Get the cuda package public key, it comes with the cuda debian install file, but
# it seems like it is not installed in the correct order. We'll steal the same key from the x86_64 repo manually
# since there isn't one specific to the ARM processor.
# Done in a single layer: -f makes curl fail on an HTTP error instead of silently
# saving an error page as the "key", and the temp file is removed in the same
# layer so it never persists in the image.
RUN curl -fsSL https://developer.download.nvidia.com/compute/cuda/repos/ubuntu1804/x86_64/7fa2af80.pub -o /tmp/7fa2af80.pub \
 && apt-key add /tmp/7fa2af80.pub \
 && rm /tmp/7fa2af80.pub
# Download and register the local L4T CUDA 10.0 repository.
WORKDIR /tmp
# curl/dpkg/rm in ONE layer: a .deb downloaded in its own RUN stays baked into
# that layer forever even if a later RUN deletes it. -f fails on HTTP errors.
RUN curl -fsSL $JETPACK_URL/cuda-repo-l4t-10-0-local-10.0.117_1.0-1_arm64.deb -o cuda-repo-l4t.deb \
 && dpkg -i cuda-repo-l4t.deb \
 && rm cuda-repo-l4t.deb
# Not all are required but the x86_x64 Jetpack installer does it, so we will too.
# Note: the apt lists are deliberately NOT removed here — later layers still
# install from this index.
RUN apt-get update \
 && apt-get install -y \
      cuda-toolkit-10-0 \
      libfreeimage-dev \
      libgomp1 \
      libopenmpi-dev \
      openmpi-bin
# Install cuDNN and TensorRT (libnvinfer) runtime/dev/samples packages from the
# Jetpack mirror. Download, install, and delete each .deb inside ONE layer so
# the archives never persist in the image; install order matters (runtime libs
# before their -dev/-samples dependents). -f makes curl fail on HTTP errors
# instead of handing dpkg an HTML error page.
RUN set -e; \
    for deb in \
      libcudnn7_7.3.1.20-1+cuda10.0_arm64.deb \
      libcudnn7-dev_7.3.1.20-1+cuda10.0_arm64.deb \
      libnvinfer5_5.0.3-1+cuda10.0_arm64.deb \
      libnvinfer-dev_5.0.3-1+cuda10.0_arm64.deb \
      libnvinfer-samples_5.0.3-1+cuda10.0_all.deb \
      tensorrt_5.0.3.2-1+cuda10.0_arm64.deb \
    ; do \
      curl -fsSL "$JETPACK_URL/$deb" -o "/tmp/$deb"; \
      dpkg -i "/tmp/$deb"; \
      rm "/tmp/$deb"; \
    done
# Install OpenCV dependencies. update+install combined in one layer so a stale
# cached index from an earlier layer is never used (hadolint DL3009); lists are
# cleaned in the same layer.
RUN apt-get update \
 && apt-get install -y \
      ffmpeg \
      libgtk2.0-0 \
      libtbb-dev \
      libtbb2 \
 && rm -rf /var/lib/apt/lists/*
# NVIDIA's prebuilt OpenCV 3.3.1 for Tegra: fetch, install, and delete each
# .deb in one layer so the archives don't persist in the image.
RUN curl -fsSL $JETPACK_URL/libopencv_3.3.1_arm64.deb -o /tmp/libopencv.deb \
 && dpkg -i /tmp/libopencv.deb \
 && rm /tmp/libopencv.deb \
 && curl -fsSL $JETPACK_URL/libopencv-dev_3.3.1_arm64.deb -o /tmp/libopencv-dev.deb \
 && dpkg -i /tmp/libopencv-dev.deb \
 && rm /tmp/libopencv-dev.deb
#COPY Tegra_Linux_Sample-Root-Filesystem_R31.1.0_aarch64.tbz2 /tmp/
WORKDIR /tmp
# Unpack the L4T BSP driver package plus the sample rootfs and run
# apply_binaries.sh against the container root. NOTE(review): this step is what
# installs the NVIDIA userspace — presumably including the libraries backing the
# nvvidconv/nvoverlaysink gstreamer plugins; after the build, verify the plugins
# actually landed (e.g. under /usr/lib/aarch64-linux-gnu/gstreamer-1.0/).
# -f makes curl fail on an HTTP error instead of piping an error page into tar.
RUN curl -fsSL $JETPACK_URL/Jetson_Linux_R31.1.0_aarch64.tbz2 | tar xjf -
RUN curl -fsSL $JETPACK_URL/Tegra_Linux_Sample-Root-Filesystem_R31.1.0_aarch64.tbz2 | tar xjf - -C /tmp/Linux_for_Tegra/rootfs/
# Modified tar options (in the local apply_binaries.sh copy) to overwrite files
COPY apply_binaries.sh /tmp/Linux_for_Tegra/
RUN /tmp/Linux_for_Tegra/apply_binaries.sh -r / && rm -fr /tmp/*
# Clean up (don't remove cuda libs... used by child containers)
RUN apt-get -y autoremove \
 && apt-get -y autoclean \
 && rm -rf /var/cache/apt
# ENV persists into the running container. A `RUN export LD_LIBRARY_PATH=...`
# only affects that single layer's shell and is a no-op for the final image, so
# the export line has been dropped.
# NOTE(review): the L4T-specific libraries installed by apply_binaries.sh
# typically live under /usr/lib/aarch64-linux-gnu/tegra — if the gstreamer
# plugins still fail to load, that path likely needs to be on LD_LIBRARY_PATH
# (or registered via ldconfig) as well; verify against a flashed device.
ENV LD_LIBRARY_PATH=/usr/local/cuda-10.0/lib64
WORKDIR /home/nvidia
run_docker.sh
#!/bin/sh
# Launch the Jetson container with the NVIDIA device nodes exposed and X11
# forwarded from the host. Requires the image ${REPOSITORY}:${TAG} to exist.
HOST_IP=$(hostname -I | awk '{print $1}')  # NOTE(review): assigned but never used below
REPOSITORY='xavier'
JETPACK_VERSION='4.4.1'                    # NOTE(review): assigned but never used below
CODE_NAME='bionic'                         # NOTE(review): assigned but never used below
TAG="test"
# Allow local root clients (the container process) to talk to the X server.
xhost +local:root
# run container (interactive, removed on exit); variable expansions are quoted
# so the command stays intact even if a value ever contains whitespace.
docker run -it \
    --device /dev/nvhost-as-gpu \
    --device /dev/nvhost-ctrl \
    --device /dev/nvhost-ctrl-gpu \
    --device /dev/nvhost-ctxsw-gpu \
    --device /dev/nvhost-dbg-gpu \
    --device /dev/nvhost-gpu \
    --device /dev/nvhost-prof-gpu \
    --device /dev/nvhost-sched-gpu \
    --device /dev/nvhost-tsg-gpu \
    --device /dev/nvmap \
    --device /dev/snd \
    --net=host \
    -e DISPLAY \
    -v /dev/shm:/dev/shm \
    -v /etc/localtime:/etc/localtime:ro \
    -v /tmp/.X11-unix:/tmp/.X11-unix \
    -v /usr/local/cuda/lib64:/usr/local/cuda/lib64 \
    --rm \
    --name "jetson-agx-${TAG}" \
    "${REPOSITORY}:${TAG}"
# Revoke the X access granted above once the container exits.
xhost -local:root