Run prediction with a .trt model (Jetson Nano 2GB)

Description

I have succeeded in extracting a prediction score from an ONNX model on my Jetson Nano 2GB as follows:

Include libraries

import cv2
import matplotlib.pyplot as plt
import tensorflow as tf  # needed for tf.TensorSpec below
import tf2onnx
import numpy as np
import onnxruntime as rt

Convert the TensorFlow 2 / Keras model to ONNX

spec = (tf.TensorSpec((1, 300, 300, 1), tf.float32, name="input"),)
output_path = model.name + ".onnx"
model_proto, _ = tf2onnx.convert.from_keras(model, input_signature=spec, opset=13, output_path=output_path)
output_names = [n.name for n in model_proto.graph.output]
print(output_names)
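
As an optional sanity check (this needs the onnx package; the expected names below are simply what I get from the spec and output above), the exported graph can be verified and its tensor names listed:

import onnx

# Verify the exported graph is well-formed and confirm the tensor names
# that are used later for inference ("input" / "dense_1" in my case).
onnx_model = onnx.load(output_path)
onnx.checker.check_model(onnx_model)
print([i.name for i in onnx_model.graph.input])   # expect ['input']
print([o.name for o in onnx_model.graph.output])  # expect ['dense_1']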

Load and deploy the .onnx model

output_names = ['dense_1']
image_shape = (300, 300, 1)
output_path = "sequential.onnx"
providers = ['CPUExecutionProvider']
m = rt.InferenceSession(output_path, providers=providers)

img_path = 'Test/cast_ok_0_35.jpeg'
img_pred = cv2.imread(img_path, cv2.IMREAD_GRAYSCALE)
print(img_pred.shape)
img_pred = cv2.resize(img_pred, (300, 300))
img_pred = img_pred / 255  # rescale
onnx_pred = m.run(output_names, {"input": img_pred.reshape(1, *image_shape).astype(np.float32)})

print(onnx_pred)
if onnx_pred[0] < 0.5:
    predicted_label = "defected"
    prob = (1 - onnx_pred[0].sum()) * 100
    color = (255, 0, 0)
else:
    predicted_label = "good"
    prob = onnx_pred[0].sum() * 100
    color = (0, 255, 0)

plt.figure(figsize=(32, 18))
img = cv2.imread(img_path)
img = cv2.resize(img, (300, 300))
cv2.putText(img=img, text=f"Result : {predicted_label}", org=(10, 30), fontFace=cv2.FONT_HERSHEY_SIMPLEX, fontScale=0.8, color=color, thickness=2)
cv2.putText(img=img, text=f"Probability : {prob:.3f}%", org=(10, 280), fontFace=cv2.FONT_HERSHEY_SIMPLEX, fontScale=0.7, color=(0, 0, 255), thickness=2)
plt.imshow(img, cmap='gray')
plt.axis('off')
plt.show()

After converting the ONNX model to a .trt engine (using trtexec), I cannot find a straightforward method to mimic the same prediction using TensorRT.
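
For reference, the engine was built with a trtexec command along these lines (a sketch; my exact flags may have differed):

/usr/src/tensorrt/bin/trtexec --onnx=sequential.onnx --saveEngine=casting_engine.trt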

Convert and check the TRT engine

import os
import tensorrt as trt

def load_engine(trt_runtime, engine_path):
    trt.init_libnvinfer_plugins(None, "")  # Try to add here
    with open(engine_path, 'rb') as f:
        engine_data = f.read()
    engine = trt_runtime.deserialize_cuda_engine(engine_data)
    return engine

TRT_LOGGER = trt.Logger(trt.Logger.WARNING)
trt_runtime = trt.Runtime(TRT_LOGGER)
trt_engine_path = "casting_engine.trt"
trt_engine = load_engine(trt_runtime, trt_engine_path)

if trt_engine is not None:
    print("Success")
else:
    print("Failed")
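
To see what the deserialized engine actually expects, the bindings can be listed with the TensorRT binding API (still present in the TensorRT 8.x that ships with JetPack on the Nano; the names and shapes in the comments are just what I would expect from the export above):

# List binding names, shapes and whether each one is an input
for i in range(trt_engine.num_bindings):
    print(i,
          trt_engine.get_binding_name(i),
          trt_engine.get_binding_shape(i),
          "input" if trt_engine.binding_is_input(i) else "output")
# expected: binding 0 -> "input"   (1, 300, 300, 1)
#           binding 1 -> "dense_1" (1, 1)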

Extract prediction score from the .trt model

???
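
The closest I have been able to piece together from the TensorRT Python samples is something like the sketch below (untested, and it assumes the engine has exactly one input and one output binding, with pycuda managing the device buffers), but I do not know whether this is the correct approach:

import cv2
import numpy as np
import pycuda.autoinit  # creates a CUDA context
import pycuda.driver as cuda

# trt_engine is the engine deserialized by load_engine() above
context = trt_engine.create_execution_context()

# Preprocess exactly like the ONNX path
img_path = 'Test/cast_ok_0_35.jpeg'
img = cv2.imread(img_path, cv2.IMREAD_GRAYSCALE)
img = cv2.resize(img, (300, 300)).astype(np.float32) / 255
input_data = np.ascontiguousarray(img.reshape(1, 300, 300, 1))

# Assume binding 0 = input, binding 1 = output (see the binding listing above)
output_shape = tuple(trt_engine.get_binding_shape(1))
output_data = np.empty(output_shape, dtype=np.float32)

# Allocate device buffers and run inference
d_input = cuda.mem_alloc(input_data.nbytes)
d_output = cuda.mem_alloc(output_data.nbytes)
stream = cuda.Stream()

cuda.memcpy_htod_async(d_input, input_data, stream)
context.execute_async_v2(bindings=[int(d_input), int(d_output)], stream_handle=stream.handle)
cuda.memcpy_dtoh_async(output_data, d_output, stream)
stream.synchronize()

print(output_data)  # should play the same role as onnx_pred[0] in the ONNX code above

If this is roughly right, the same threshold logic as before (score < 0.5 means "defected") should apply to output_data, but I have not been able to confirm it.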

Any help would be appreciated. Thank you.

Hi,

This looks like a Jetson issue. Please refer to the samples below in case they are useful.

For any further assistance, we will move this post to the Jetson-related forum.

Thanks!

All of these examples use pre-trained models like ImageNet; there is no inference example that extracts a prediction score the way I do above. Thanks.