def main():
    """Run the detection loop: capture camera frames, classify eye state
    with an SSD-Mobilenet ONNX model, overlay a speed label, and trigger
    the alarm animation when eyes stay closed past CLOSE_EYE_TIME.

    Relies on module-level names defined elsewhere in this file:
    MODEL_DIR, CAMERA_PATH, GIF_TIME, CLOSE_EYE_TIME, gif_thread,
    play_alarm, output, and the jetson.inference / jetson.utils modules.
    """
    # NOTE(review): the 'googlenet' network name looks misleading here —
    # presumably the custom --model argument takes precedence, but confirm
    # against jetson.inference.detectNet docs before renaming.
    net = jetson.inference.detectNet('googlenet', [
        '--model={}'.format(os.path.join(MODEL_DIR, 'ssd-mobilenet.onnx')),
        '--labels={}'.format(os.path.join(MODEL_DIR, 'labels.txt')),
        '--input-blob=input_0', '--output-cvg=scores', '--output-bbox=boxes'
    ])
    speed = 0.0  # displayed only; never updated in this loop — TODO confirm its source
    camera = jetson.utils.videoSource(CAMERA_PATH)
    info = jetson.utils.cudaFont()
    dt = time.time()          # moment eyes were first seen closed
    close_state = False       # True while a closed-eyes episode is in progress
    timer = time.time()       # when the current non-default GIF started
    try:
        while True:
            img = camera.Capture()
            detections = net.Detect(img)
            # Class names detected in this frame.
            items = []
            for detection in detections:
                item = net.GetClassDesc(detection.ClassID)
                items.append(item)
                # Draw the speed label next to this detection's box.
                info.OverlayText(img, 5, 5, "speed:{:.2f}".format(speed),
                                 int(detection.Left) + 5, int(detection.Top) + 35,
                                 info.White, info.Gray40)
            # Revert a temporary animation to default after GIF_TIME.
            if gif_thread.current_state != "default":
                if (time.time() - timer) > GIF_TIME:
                    gif_thread.set_state("default")
            # Eyes closed longer than the threshold: sound the alarm.
            if close_state:
                if (time.time() - dt) > CLOSE_EYE_TIME:
                    gif_thread.set_state("mad")
                    play_alarm()
            if "ClosedEyes" in items or "LookingDown" in items:
                if not close_state:
                    close_state = True
                    dt = time.time()  # start timing this closed-eyes episode
            elif "OpenEyes" in items:
                close_state = False
                gif_thread.set_state("default")
            output.Render(img)
            output.SetStatus(
                "Object Detection | Network {:.0f} FPS".format(
                    net.GetNetworkFPS()
                )
            )
            # net.PrintProfilerTimes()  # uncomment for performance info
    except KeyboardInterrupt:
        sys.exit(0)
    except Exception as e:
        # BUGFIX: the original `finally: sys.exit(0)` masked every failure
        # with a success exit code; report the error and exit non-zero.
        print(e)
        sys.exit(1)
[TRT] TensorRT version 8.0.1
[TRT] loading NVIDIA plugins...
[TRT] Registered plugin creator - ::GridAnchor_TRT version 1
[TRT] Registered plugin creator - ::GridAnchorRect_TRT version 1
[TRT] Registered plugin creator - ::NMS_TRT version 1
[TRT] Registered plugin creator - ::Reorg_TRT version 1
[TRT] Registered plugin creator - ::Region_TRT version 1
[TRT] Registered plugin creator - ::Clip_TRT version 1
[TRT] Registered plugin creator - ::LReLU_TRT version 1
[TRT] Registered plugin creator - ::PriorBox_TRT version 1
[TRT] Registered plugin creator - ::Normalize_TRT version 1
[TRT] Registered plugin creator - ::ScatterND version 1
[TRT] Registered plugin creator - ::RPROI_TRT version 1
[TRT] Registered plugin creator - ::BatchedNMS_TRT version 1
[TRT] Registered plugin creator - ::BatchedNMSDynamic_TRT version 1
[TRT] Could not register plugin creator - ::FlattenConcat_TRT version 1
[TRT] Registered plugin creator - ::CropAndResize version 1
[TRT] Registered plugin creator - ::DetectionLayer_TRT version 1
[TRT] Registered plugin creator - ::EfficientNMS_ONNX_TRT version 1
[TRT] Registered plugin creator - ::EfficientNMS_TRT version 1
[TRT] Registered plugin creator - ::Proposal version 1
[TRT] Registered plugin creator - ::ProposalLayer_TRT version 1
[TRT] Registered plugin creator - ::PyramidROIAlign_TRT version 1
[TRT] Registered plugin creator - ::ResizeNearest_TRT version 1
[TRT] Registered plugin creator - ::Split version 1
[TRT] Registered plugin creator - ::SpecialSlice_TRT version 1
[TRT] Registered plugin creator - ::InstanceNormalization_TRT version 1
[TRT] detected model format - ONNX (extension '.onnx')
[TRT] desired precision specified for GPU: FASTEST
[TRT] requested fasted precision for device GPU without providing valid calibrator, disabling INT8
[TRT] [MemUsageChange] Init CUDA: CPU +197, GPU +0, now: CPU 234, GPU 3386 (MiB)
[TRT] native precisions detected for GPU: FP32, FP16
[TRT] selecting fastest native precision for GPU: FP16
[TRT] attempting to open engine cache file /home/arctic/Downloads/work/model/ssd-mobilenet.onnx.1.1.8001.GPU.FP16.engine
[TRT] loading network plan from engine cache... /home/arctic/Downloads/work/model/ssd-mobilenet.onnx.1.1.8001.GPU.FP16.engine
[TRT] device GPU, loaded /home/arctic/Downloads/work/model/ssd-mobilenet.onnx
[TRT] [MemUsageChange] Init CUDA: CPU +0, GPU +0, now: CPU 251, GPU 3420 (MiB)
[TRT] Loaded engine size: 16 MB
[TRT] [MemUsageSnapshot] deserializeCudaEngine begin: CPU 251 MiB, GPU 3420 MiB
[TRT] Using an engine plan file across different models of devices is not recommended and is likely to affect performance or even cause errors.
[TRT] Using cublas a tactic source
[TRT] [MemUsageChange] Init cuBLAS/cuBLASLt: CPU +158, GPU +258, now: CPU 409, GPU 3678 (MiB)
[TRT] Using cuDNN as a tactic source
[TRT] [MemUsageChange] Init cuDNN: CPU +241, GPU +194, now: CPU 650, GPU 3872 (MiB)
[TRT] [MemUsageChange] Init cuBLAS/cuBLASLt: CPU +0, GPU +0, now: CPU 650, GPU 3873 (MiB)
[TRT] Deserialization required 5192730 microseconds.
[TRT] [MemUsageSnapshot] deserializeCudaEngine end: CPU 650 MiB, GPU 3873 MiB
[TRT] [MemUsageSnapshot] ExecutionContext creation begin: CPU 650 MiB, GPU 3873 MiB
[TRT] Using cublas a tactic source
[TRT] [MemUsageChange] Init cuBLAS/cuBLASLt: CPU +0, GPU +0, now: CPU 650, GPU 3873 (MiB)
[TRT] Using cuDNN as a tactic source
[TRT] [MemUsageChange] Init cuDNN: CPU +0, GPU +0, now: CPU 650, GPU 3873 (MiB)
[TRT] Total per-runner device memory is 14449152
[TRT] Total per-runner host memory is 69856
[TRT] Allocated activation device memory of size 9439744
[TRT] [MemUsageSnapshot] ExecutionContext creation end: CPU 652 MiB, GPU 3892 MiB
[TRT]
[TRT] CUDA engine context initialized on device GPU:
[TRT] -- layers 102
[TRT] -- maxBatchSize 1
[TRT] -- deviceMemory 9439744
[TRT] -- bindings 3
[TRT] binding 0
-- index 0
-- name 'input_0'
-- type FP32
-- in/out INPUT
-- # dims 4
-- dim #0 1
-- dim #1 3
-- dim #2 300
-- dim #3 300
[TRT] binding 1
-- index 1
-- name 'scores'
-- type FP32
-- in/out OUTPUT
-- # dims 3
-- dim #0 1
-- dim #1 3000
-- dim #2 5
[TRT] binding 2
-- index 2
-- name 'boxes'
-- type FP32
-- in/out OUTPUT
-- # dims 3
-- dim #0 1
-- dim #1 3000
-- dim #2 4
[TRT]
[TRT] binding to input 0 input_0 binding index: 0
[TRT] binding to input 0 input_0 dims (b=1 c=3 h=300 w=300) size=1080000
[TRT] binding to output 0 scores binding index: 1
[TRT] binding to output 0 scores dims (b=1 c=3000 h=5 w=1) size=60000
[TRT] binding to output 1 boxes binding index: 2
[TRT] binding to output 1 boxes dims (b=1 c=3000 h=4 w=1) size=48000
[TRT]
[TRT] device GPU, /home/arctic/Downloads/work/model/ssd-mobilenet.onnx initialized.
[TRT] detectNet -- number object classes: 5
[TRT] detectNet -- maximum bounding boxes: 3000
[TRT] detectNet -- loaded 5 class info entries
[TRT] detectNet -- number of object classes: 5
[gstreamer] initialized gstreamer, version 1.14.5.0
[gstreamer] gstCamera -- attempting to create device v4l2:///dev/video1
(python:24882): GStreamer-CRITICAL **: 17:40:08.131: gst_element_message_full_with_details: assertion 'GST_IS_ELEMENT (element)' failed
(python:24882): GStreamer-CRITICAL **: 17:40:08.132: gst_element_message_full_with_details: assertion 'GST_IS_ELEMENT (element)' failed
(python:24882): GStreamer-CRITICAL **: 17:40:08.132: gst_element_message_full_with_details: assertion 'GST_IS_ELEMENT (element)' failed
[gstreamer] gstCamera -- found v4l2 device: Logitech HD Webcam C270
[gstreamer] v4l2-proplist, device.path=(string)/dev/video1, udev-probed=(boolean)false, device.api=(string)v4l2, v4l2.device.driver=(string)uvcvideo, v4l2.device.card=(string)"Logitech\ HD\ Webcam\ C270", v4l2.device.bus_info=(string)usb-70090000.xusb-2.2, v4l2.device.version=(uint)264701, v4l2.device.capabilities=(uint)2216689665, v4l2.device.device_caps=(uint)69206017;
[gstreamer] gstCamera -- found 15 caps for v4l2 device /dev/video1
[gstreamer] [0] video/x-raw, format=(string)YUY2, width=(int)1280, height=(int)960, pixel-aspect-ratio=(fraction)1/1, framerate=(fraction)5/1;
[gstreamer] [1] video/x-raw, format=(string)YUY2, width=(int)1280, height=(int)960, pixel-aspect-ratio=(fraction)1/1, framerate=(fraction)5/1;
[gstreamer] [2] video/x-raw, format=(string)YUY2, width=(int)1280, height=(int)720, pixel-aspect-ratio=(fraction)1/1, framerate=(fraction)5/1;
[gstreamer] [3] video/x-raw, format=(string)YUY2, width=(int)1024, height=(int)768, pixel-aspect-ratio=(fraction)1/1, framerate=(fraction)5/1;
[gstreamer] [4] video/x-raw, format=(string)YUY2, width=(int)800, height=(int)600, pixel-aspect-ratio=(fraction)1/1, framerate=(fraction)20/1;
[gstreamer] [5] video/x-raw, format=(string)YUY2, width=(int)640, height=(int)480, pixel-aspect-ratio=(fraction)1/1, framerate=(fraction){ 30/1, 20/1, 15/1, 10/1, 5/1 };
[gstreamer] [6] video/x-raw, format=(string)YUY2, width=(int)320, height=(int)240, pixel-aspect-ratio=(fraction)1/1, framerate=(fraction)30/1;
[gstreamer] [7] image/jpeg, width=(int)1280, height=(int)960, pixel-aspect-ratio=(fraction)1/1, framerate=(fraction){ 30/1, 20/1, 15/1, 10/1, 5/1 };
[gstreamer] [8] image/jpeg, width=(int)1280, height=(int)960, pixel-aspect-ratio=(fraction)1/1, framerate=(fraction){ 30/1, 20/1, 15/1, 10/1, 5/1 };
[gstreamer] [9] image/jpeg, width=(int)1280, height=(int)720, pixel-aspect-ratio=(fraction)1/1, framerate=(fraction){ 30/1, 20/1, 15/1, 10/1, 5/1 };
[gstreamer] [10] image/jpeg, width=(int)1024, height=(int)768, pixel-aspect-ratio=(fraction)1/1, framerate=(fraction){ 30/1, 20/1, 15/1, 10/1, 5/1 };
[gstreamer] [11] image/jpeg, width=(int)800, height=(int)600, pixel-aspect-ratio=(fraction)1/1, framerate=(fraction){ 30/1, 20/1, 15/1, 10/1, 5/1 };
[gstreamer] [12] image/jpeg, width=(int)640, height=(int)480, pixel-aspect-ratio=(fraction)1/1, framerate=(fraction){ 30/1, 25/1, 20/1, 15/1, 10/1, 5/1 };
[gstreamer] [13] image/jpeg, width=(int)640, height=(int)360, pixel-aspect-ratio=(fraction)1/1, framerate=(fraction){ 30/1, 20/1, 15/1, 10/1, 5/1 };
[gstreamer] [14] image/jpeg, width=(int)320, height=(int)240, pixel-aspect-ratio=(fraction)1/1, framerate=(fraction){ 30/1, 20/1, 15/1, 10/1, 5/1 };
[gstreamer] gstCamera -- selected device profile: codec=mjpeg format=unknown width=1280 height=720
[gstreamer] gstCamera pipeline string:
[gstreamer] v4l2src device=/dev/video1 ! image/jpeg, width=(int)1280, height=(int)720 ! jpegdec ! video/x-raw ! appsink name=mysink
[gstreamer] gstCamera successfully created device v4l2:///dev/video1
[video] created gstCamera from v4l2:///dev/video1
------------------------------------------------
gstCamera video options:
------------------------------------------------
-- URI: v4l2:///dev/video1
- protocol: v4l2
- location: /dev/video1
- port: 1
-- deviceType: v4l2
-- ioType: input
-- codec: mjpeg
-- width: 1280
-- height: 720
-- frameRate: 30.000000
-- bitRate: 0
-- numBuffers: 4
-- zeroCopy: true
-- flipMethod: none
-- loop: 0
-- rtspLatency 2000
------------------------------------------------
[gstreamer] opening gstCamera for streaming, transitioning pipeline to GST_STATE_PLAYING
[gstreamer] gstreamer changed state from NULL to READY ==> mysink
[gstreamer] gstreamer changed state from NULL to READY ==> capsfilter1
[gstreamer] gstreamer changed state from NULL to READY ==> jpegdec0
[gstreamer] gstreamer changed state from NULL to READY ==> capsfilter0
[gstreamer] gstreamer changed state from NULL to READY ==> v4l2src0
[gstreamer] gstreamer changed state from NULL to READY ==> pipeline0
[gstreamer] gstreamer changed state from READY to PAUSED ==> capsfilter1
[gstreamer] gstreamer changed state from READY to PAUSED ==> jpegdec0
[gstreamer] gstreamer changed state from READY to PAUSED ==> capsfilter0
[gstreamer] gstreamer stream status CREATE ==> src
[gstreamer] gstreamer changed state from READY to PAUSED ==> v4l2src0
[gstreamer] gstreamer changed state from READY to PAUSED ==> pipeline0
[gstreamer] gstreamer message new-clock ==> pipeline0
[gstreamer] gstreamer changed state from PAUSED to PLAYING ==> capsfilter1
[gstreamer] gstreamer changed state from PAUSED to PLAYING ==> jpegdec0
[gstreamer] gstreamer changed state from PAUSED to PLAYING ==> capsfilter0
[gstreamer] gstreamer changed state from PAUSED to PLAYING ==> v4l2src0
[gstreamer] gstreamer stream status ENTER ==> src
[gstreamer] gstreamer message stream-start ==> pipeline0
[gstreamer] gstCamera -- onPreroll
[gstreamer] gstCamera -- map buffer size was less than max size (1382400 vs 1382407)
[gstreamer] gstCamera recieve caps: video/x-raw, format=(string)I420, width=(int)1280, height=(int)720, interlace-mode=(string)progressive, multiview-mode=(string)mono, multiview-flags=(GstVideoMultiviewFlagsSet)0:ffffffff:/right-view-first/left-flipped/left-flopped/right-flipped/right-flopped/half-aspect/mixed-mono, pixel-aspect-ratio=(fraction)1/1, chroma-site=(string)mpeg2, colorimetry=(string)1:4:0:0, framerate=(fraction)30/1
[gstreamer] gstCamera -- recieved first frame, codec=mjpeg format=i420 width=1280 height=720 size=1382407
RingBuffer -- allocated 4 buffers (1382407 bytes each, 5529628 bytes total)
[gstreamer] gstreamer changed state from READY to PAUSED ==> mysink
[gstreamer] gstreamer message async-done ==> pipeline0
[gstreamer] gstreamer changed state from PAUSED to PLAYING ==> mysink
[gstreamer] gstreamer changed state from PAUSED to PLAYING ==> pipeline0
RingBuffer -- allocated 4 buffers (2764800 bytes each, 11059200 bytes total)
[OpenGL] creating 1280x720 texture (GL_RGB8 format, 2764800 bytes)
The program then opens a window and displays a black screen.
I have seen a GitHub issue recommending the following piece of code, but I am not sure how to implement it:
if( !camera->ConvertRGBA(imgCUDA, &imgRGBA, true) )
/*void* tex_map = texture->MapCUDA();
if( tex_map != NULL )
{
cudaMemcpy(tex_map, imgRGBA, texture->GetSize(), cudaMemcpyDeviceToDevice);
texture->Unmap();
}*/
CUDA(cudaDeviceSynchronize());
texture->UploadCPU(imageRGBA);