Hi AastLLL,
I followed the link below and ran the command "python3.6 convert_to_uff.py --input-file /home/jetbot/models/research/object_detection/inference_graph/frozen_inference_graph.pb -O Postprocessor -p config.py" to generate the .uff file. However, the resulting .uff file is only 440 bytes, whereas frozen_inference_graph.pb is 52.4 MB. The config file I used is attached below. Could you let me know what is wrong here?
https://devtalk.nvidia.com/default/topic/1056054/jetson-tx2/how-to-retrain-ssd_inception_v2_coco_2017_11_17-from-the-tensorrt-samples/2
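In case it helps with diagnosing this, a quick way to list the candidate output node names in the frozen graph before choosing what to pass to -O would be something like the sketch below (my assumption: TensorFlow 1.x, which is what the UFF converter runs against):

import tensorflow as tf

# Load the frozen graph and print nodes whose names look like detection outputs,
# to double-check the node name passed as -O to convert_to_uff.py.
graph_def = tf.GraphDef()
with tf.gfile.GFile('/home/jetbot/models/research/object_detection/inference_graph/frozen_inference_graph.pb', 'rb') as f:
    graph_def.ParseFromString(f.read())

for node in graph_def.node:
    if 'Postprocessor' in node.name or 'detection' in node.name:
        print(node.name, node.op)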
config.py file:
import graphsurgeon as gs
import tensorflow as tf

path = '/home/jetbot/models/research/object_detection/inference_graph/frozen_inference_graph.pb'
TRTbin = 'TRT_ssd_mobilenet_v1_coco_2018_01_28.bin'
output_name = ['Postprocessor']
dims = [3, 300, 300]
layout = 7

Input = gs.create_plugin_node(
    name="Input",
    op="Placeholder",
    shape=[1, 3, 300, 300]
)

PriorBox = gs.create_plugin_node(
    name="MultipleGridAnchorGenerator",
    op="GridAnchor_TRT",
    minSize=0.2,
    maxSize=0.95,
    aspectRatios=[1.0, 2.0, 0.5, 3.0, 0.33],
    variance=[0.1, 0.1, 0.2, 0.2],
    featureMapShapes=[19, 10, 5, 3, 2, 1],
    numLayers=6
)

Postprocessor = gs.create_plugin_node(
    name="Postprocessor",
    op="NMS_TRT",
    shareLocation=1,
    varianceEncodedInTarget=0,
    backgroundLabelId=0,
    confidenceThreshold=1e-8,
    nmsThreshold=0.6,
    topK=100,
    keepTopK=100,
    numClasses=7,
    inputOrder=[0, 2, 1],
    confSigmoid=1,
    isNormalized=1,
    scoreConverter="SIGMOID"
)

concat_priorbox = gs.create_plugin_node(
    "concat_priorbox",
    op="ConcatV2",
    axis=2
)

concat_box_loc = gs.create_plugin_node(
    "concat_box_loc",
    op="FlattenConcat_TRT",
)

concat_box_conf = gs.create_plugin_node(
    "concat_box_conf",
    op="FlattenConcat_TRT",
)

namespace_plugin_map = {
    "MultipleGridAnchorGenerator": PriorBox,
    "Postprocessor": Postprocessor,
    "Preprocessor": Input,
    "ToFloat": Input,
    "image_tensor": Input,
    "MultipleGridAnchorGenerator/Concatenate": concat_priorbox,
    "concat": concat_box_loc,
    "concat_1": concat_box_conf
}

def preprocess(graph):
    all_assert_nodes = graph.find_nodes_by_op("Assert")
    graph.remove(all_assert_nodes, remove_exclusive_dependencies=True)
    all_identity_nodes = graph.find_nodes_by_op("Identity")
    graph.forward_inputs(all_identity_nodes)
    print(" Operation done ")
    graph.collapse_namespaces(namespace_plugin_map)
    graph.remove(graph.graph_outputs, remove_exclusive_dependencies=False)
    # graph.find_nodes_by_op("NMS_TRT")[0].input.remove("Input")
    # graph.find_nodes_by_name("Input")[0].input.remove("image_tensor:0")
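For completeness, I believe the same conversion can also be reproduced through the Python UFF API instead of the convert_to_uff.py script; the sketch below is only my assumption of how that would look (I am not fully sure about the form of the preprocessor argument), with the same frozen graph, output node, and this config.py in the working directory:

import uff

# Assumed equivalent of the convert_to_uff.py invocation above: the preprocessor
# argument points at this config.py so the plugin mapping is applied before export.
uff.from_tensorflow_frozen_model(
    '/home/jetbot/models/research/object_detection/inference_graph/frozen_inference_graph.pb',
    output_nodes=['Postprocessor'],
    preprocessor='config.py',
    output_filename='frozen_inference_graph.uff'
)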